v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
builtins-ia32.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/base/iterator.h"
// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
#include "src/debug/debug.h"
#include "src/heap/heap-inl.h"
#include "src/objects/cell.h"
#include "src/objects/foreign.h"
#include "src/objects/smi.h"

#if V8_ENABLE_WEBASSEMBLY
#endif  // V8_ENABLE_WEBASSEMBLY

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                int formal_parameter_count, Address address) {
  __ Move(kJavaScriptCallExtraArg1Register,
          Immediate(ExternalReference::Create(address)));
  __ TailCallBuiltin(
      Builtins::AdaptorWithBuiltinExitFrame(formal_parameter_count));
}

namespace {

constexpr int kReceiverOnStackSize = kSystemPointerSize;

enum class ArgumentsElementType {
  kRaw,    // Push arguments as they are.
  kHandle  // Dereference arguments before pushing.
};

void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
                            Register scratch1, Register scratch2,
                            ArgumentsElementType element_type) {
  DCHECK(!AreAliased(array, argc, scratch1, scratch2));
  Register counter = scratch1;
  Label loop, entry;
  __ lea(counter, Operand(argc, -kJSArgcReceiverSlots));
  __ jmp(&entry);
  __ bind(&loop);
  Operand value(array, counter, times_system_pointer_size, 0);
  if (element_type == ArgumentsElementType::kHandle) {
    DCHECK(scratch2 != no_reg);
    __ mov(scratch2, value);
    value = Operand(scratch2, 0);
  }
  __ Push(value);
  __ bind(&entry);
  __ dec(counter);
  __ j(greater_equal, &loop, Label::kNear);
}
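
// Illustrative sketch of the loop above (not part of the original file): with
// kJSArgcReceiverSlots == 1 and argc == 3 (receiver included), the counter
// starts at 2 and the loop pushes array[2], array[1], array[0] in that order,
// leaving array[0] closest to the stack top. In kHandle mode each array slot
// holds a handle location (a pointer to the actual object), hence the extra
// dereference through scratch2 before the push.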

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments
  //  -- edi: constructor function
  //  -- edx: new target
  //  -- esi: context
  // -----------------------------------

  Label stack_overflow;

  __ StackOverflowCheck(eax, ecx, &stack_overflow);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ push(esi);
    __ push(eax);

    // TODO(victorgomes): When the arguments adaptor is completely removed, we
    // should get the formal parameter count and copy the arguments in their
    // correct positions (including any undefined), instead of delaying this to
    // InvokeFunction.

    // Set up pointer to first argument (skip receiver).
    __ lea(esi, Operand(ebp, StandardFrameConstants::kFixedFrameSizeAboveFp +
                                 kSystemPointerSize));
    // Copy arguments to the expression stack.
    // esi: Pointer to start of arguments.
    // eax: Number of arguments.
    Generate_PushArguments(masm, esi, eax, ecx, no_reg,
                           ArgumentsElementType::kRaw);
    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);

    // Call the function.
    // eax: number of arguments (untagged)
    // edi: constructor function
    // edx: new target
    // Reload context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
    __ InvokeFunction(edi, edx, eax, InvokeType::kCall);

    // Restore context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
    // Restore arguments count from the frame.
    __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ DropArguments(edx, ecx);
  __ ret(0);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}

}  // namespace

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments (untagged)
  //  -- edi: constructor function
  //  -- edx: new target
  //  -- esi: context
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  FrameScope scope(masm, StackFrame::MANUAL);
  // Enter a construct frame.
  __ EnterFrame(StackFrame::CONSTRUCT);

  Label post_instantiation_deopt_entry, not_create_implicit_receiver;

  // Preserve the incoming parameters on the stack.
  __ Push(esi);
  __ Push(eax);
  __ Push(edi);
  __ PushRoot(RootIndex::kTheHoleValue);
  __ Push(edx);

  // ----------- S t a t e -------------
  //  --         sp[0*kSystemPointerSize]: new target
  //  --         sp[1*kSystemPointerSize]: padding
  //  -- edi and sp[2*kSystemPointerSize]: constructor function
  //  --         sp[3*kSystemPointerSize]: argument count
  //  --         sp[4*kSystemPointerSize]: context
  // -----------------------------------

  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kFlagsOffset));
  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(eax);
  __ JumpIfIsInRange(
      eax, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
      static_cast<uint32_t>(FunctionKind::kDerivedConstructor), ecx,
      &not_create_implicit_receiver, Label::kNear);

  // If not derived class constructor: Allocate the new receiver object.
  __ CallBuiltin(Builtin::kFastNewObject);
  __ jmp(&post_instantiation_deopt_entry, Label::kNear);

  // Else: use TheHoleValue as receiver for constructor call.
  __ bind(&not_create_implicit_receiver);
  __ LoadRoot(eax, RootIndex::kTheHoleValue);

  // ----------- S t a t e -------------
  //  -- eax: implicit receiver
  //  -- Slot 4 / sp[0*kSystemPointerSize]: new target
  //  -- Slot 3 / sp[1*kSystemPointerSize]: padding
  //  -- Slot 2 / sp[2*kSystemPointerSize]: constructor function
  //  -- Slot 1 / sp[3*kSystemPointerSize]: number of arguments
  //  -- Slot 0 / sp[4*kSystemPointerSize]: context
  // -----------------------------------
  // Deoptimizer enters here.
  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
      masm->pc_offset());
  __ bind(&post_instantiation_deopt_entry);

  // Restore new target.
  __ Pop(edx);

  // Push the allocated receiver to the stack.
  __ Push(eax);

  // We need two copies because we may have to return the original one
  // and the calling conventions dictate that the called function pops the
  // receiver. The second copy is pushed after the arguments; we stash it in
  // xmm0 for now, since eax needs to hold the number of arguments until
  // InvokeFunction.
  __ movd(xmm0, eax);

  // Set up pointer to first argument (skip receiver).
  __ lea(edi, Operand(ebp, StandardFrameConstants::kFixedFrameSizeAboveFp +
                               kSystemPointerSize));

  // Restore argument count.
  __ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));

  // Check if we have enough stack space to push all arguments.
  // Argument count in eax. Clobbers ecx.
  Label stack_overflow;
  __ StackOverflowCheck(eax, ecx, &stack_overflow);

  // TODO(victorgomes): When the arguments adaptor is completely removed, we
  // should get the formal parameter count and copy the arguments in their
  // correct positions (including any undefined), instead of delaying this to
  // InvokeFunction.

  // Copy arguments to the expression stack.
  // edi: Pointer to start of arguments.
  // eax: Number of arguments.
  Generate_PushArguments(masm, edi, eax, ecx, no_reg,
                         ArgumentsElementType::kRaw);

  // Push implicit receiver.
  __ movd(ecx, xmm0);
  __ Push(ecx);

  // Restore and call the constructor function.
  __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
  __ InvokeFunction(edi, edx, eax, InvokeType::kCall);

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.

  Label check_result, use_receiver, do_throw, leave_and_return;
  // If the result is undefined, we jump out to using the implicit receiver.
  __ JumpIfNotRoot(eax, RootIndex::kUndefinedValue, &check_result,
                   Label::kNear);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ mov(eax, Operand(esp, 0 * kSystemPointerSize));
  __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);

  __ bind(&leave_and_return);
  // Restore arguments count from the frame.
  __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
  __ LeaveFrame(StackFrame::CONSTRUCT);

  // Remove caller arguments from the stack and return.
  __ DropArguments(edx, ecx);
  __ ret(0);

  // Otherwise we do a smi check and fall through to check if the return value
  // is a valid receiver.
  __ bind(&check_result);

  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(eax, &use_receiver, Label::kNear);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
  static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &leave_and_return, Label::kNear);
  __ jmp(&use_receiver, Label::kNear);

  __ bind(&do_throw);
  // Restore context from the frame.
  __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
  // This should be unreachable.
  __ int3();

  __ bind(&stack_overflow);
  // Restore context from the frame.
  __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // This should be unreachable.
  __ int3();
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(edi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

namespace {

// Called with the native C calling convention. The corresponding function
// signature is either:
//
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, Address new_target, Address target,
//       Address receiver, intptr_t argc, Address** argv)>;
// or
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, MicrotaskQueue* microtask_queue)>;
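//
// Illustrative C++ caller (a sketch, not code from this file; the real call
// sites live in src/execution/execution.cc, and GeneratedCode is defined in
// src/execution/simulator.h):
//
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, Address new_target, Address target,
//       Address receiver, intptr_t argc, Address** argv)>;
//   JSEntryFunction stub_entry =
//       JSEntryFunction::FromAddress(isolate, code->instruction_start());
//   Address result = stub_entry.Call(isolate->isolate_root(), new_target,
//                                    target, receiver, argc, argv);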
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
                             Builtin entry_trampoline) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  {
    NoRootArrayScope uninitialized_root_register(masm);

    // Set up frame.
    __ push(ebp);
    __ mov(ebp, esp);

    // Push marker in two places.
    __ push(Immediate(StackFrame::TypeToMarker(type)));
    // Reserve a slot for the context. It is filled after the root register has
    // been set up.
    __ AllocateStackSpace(kSystemPointerSize);
    // Save callee-saved registers (C calling conventions).
    __ push(edi);
    __ push(esi);
    __ push(ebx);

    // Initialize the root register based on the given Isolate* argument.
    // C calling convention. The first argument is passed on the stack.
    __ mov(kRootRegister,
           Operand(ebp, EntryFrameConstants::kRootRegisterValueOffset));
  }

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp = ExternalReference::Create(
      IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ push(__ ExternalReferenceAsOperand(c_entry_fp, edi));

  __ push(__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP));

  __ push(__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC));

  // Clear c_entry_fp, now that we've pushed its previous value to the stack.
  // If c_entry_fp is not already zero and we don't clear it, the
  // StackFrameIteratorForProfiler will assume we are executing C++ and miss
  // the JS frames on top.
  __ mov(__ ExternalReferenceAsOperand(c_entry_fp, edi), Immediate(0));
  __ mov(__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP),
         Immediate(0));
  __ mov(__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC),
         Immediate(0));

  // Store the context address in the previously-reserved slot.
  ExternalReference context_address = ExternalReference::Create(
      IsolateAddressId::kContextAddress, masm->isolate());
  __ mov(edi, __ ExternalReferenceAsOperand(context_address, edi));
  static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
  __ mov(Operand(ebp, kOffsetToContextSlot), edi);

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp = ExternalReference::Create(
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ cmp(__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(__ ExternalReferenceAsOperand(js_entry_sp, edi), ebp);
  __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);

  // Store the current pc as the handler offset. It's used later to create the
  // handler table.
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

  // Caught exception: Store result (exception) in the exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference exception = ExternalReference::Create(
      IsolateAddressId::kExceptionAddress, masm->isolate());
  __ mov(__ ExternalReferenceAsOperand(exception, edi), eax);

  __ Move(eax, masm->isolate()->factory()->exception());
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler(edi);

  // Invoke the function by calling through the JS entry trampoline builtin and
  // pop the faked function when we return.
  __ CallBuiltin(entry_trampoline);

  // Unlink this frame from the handler chain.
  __ PopStackHandler(edi);

  __ bind(&exit);

  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(edi);
  __ cmp(edi, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC));
  __ pop(__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP));
  __ pop(__ ExternalReferenceAsOperand(c_entry_fp, edi));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kSystemPointerSize));  // Remove the markers.

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}

}  // namespace

void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtin::kJSConstructEntryTrampoline);
}

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtin::kRunMicrotasksTrampoline);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    const Register scratch1 = edx;
    const Register scratch2 = edi;

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ mov(esi, __ ExternalReferenceAsOperand(context_address, scratch1));

    // Load the previous frame pointer (edx) to access C arguments.
    __ mov(scratch1, Operand(ebp, 0));

    // Push the function.
    __ push(Operand(scratch1, EntryFrameConstants::kFunctionArgOffset));

    // Load the number of arguments and set up a pointer to the arguments.
    __ mov(eax, Operand(scratch1, EntryFrameConstants::kArgcOffset));
    __ mov(scratch1, Operand(scratch1, EntryFrameConstants::kArgvOffset));

    // Check if we have enough stack space to push all arguments.
    // Argument count in eax. Clobbers ecx.
    Label enough_stack_space, stack_overflow;
    __ StackOverflowCheck(eax, ecx, &stack_overflow);
    __ jmp(&enough_stack_space);

    __ bind(&stack_overflow);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments to the stack.
    // scratch1 (edx): Pointer to start of arguments.
    // eax: Number of arguments.
    Generate_PushArguments(masm, scratch1, eax, ecx, scratch2,
                           ArgumentsElementType::kHandle);

    // Load the previous frame pointer to access C arguments.
    __ mov(scratch2, Operand(ebp, 0));

    // Push the receiver onto the stack.
    __ push(Operand(scratch2, EntryFrameConstants::kReceiverArgOffset));

    // Get the new.target and function from the frame.
    __ mov(edx, Operand(scratch2, EntryFrameConstants::kNewTargetArgOffset));
    __ mov(edi, Operand(scratch2, EntryFrameConstants::kFunctionArgOffset));

    // Invoke the code.
    Builtin builtin = is_construct ? Builtin::kConstruct : Builtins::Call();
    __ CallBuiltin(builtin);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code invocation.
  }
  __ ret(0);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  // This expects two C++ function parameters passed by Invoke() in
  // execution.cc: the root register value and the microtask_queue.
  __ mov(RunMicrotasksDescriptor::MicrotaskQueueRegister(),
         Operand(ebp, EntryFrameConstants::kMicrotaskQueueArgOffset));
  __ TailCallBuiltin(Builtin::kRunMicrotasks);
}

static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
  __ j(not_equal, &done, Label::kNear);
  __ mov(sfi_data,
         FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}

static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
                                 Register scratch) {
  DCHECK(!AreAliased(code, scratch));
  // Verify that the code kind is baseline code via the CodeKind.
  __ mov(scratch, FieldOperand(code, Code::kFlagsOffset));
  __ DecodeField<Code::KindField>(scratch);
  __ cmp(scratch, Immediate(static_cast<int>(CodeKind::BASELINE)));
  __ Assert(equal, AbortReason::kExpectedBaselineData);
}

static void GetSharedFunctionInfoBytecodeOrBaseline(
    MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
    Label* is_baseline, Label* is_unavailable) {
  ASM_CODE_COMMENT(masm);
  Label done;

  Register data = bytecode;
  __ mov(data,
         FieldOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset));

  __ LoadMap(scratch1, data);

#ifndef V8_JITLESS
  __ CmpInstanceType(scratch1, CODE_TYPE);
  if (v8_flags.debug_code) {
    Label not_baseline;
    __ j(not_equal, &not_baseline);
    AssertCodeIsBaseline(masm, data, scratch1);
    __ j(equal, is_baseline);
    __ bind(&not_baseline);
  } else {
    __ j(equal, is_baseline);
  }
#endif  // !V8_JITLESS

  __ CmpInstanceType(scratch1, BYTECODE_ARRAY_TYPE);
  __ j(equal, &done, Label::kNear);

  __ CmpInstanceType(scratch1, INTERPRETER_DATA_TYPE);
  __ j(not_equal, is_unavailable);
  __ mov(data, FieldOperand(data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : the value to pass to the generator
  //  -- edx    : the JSGeneratorObject to resume
  //  -- esp[0] : return address
  // -----------------------------------
  // Store the input value into the generator object.
  __ mov(FieldOperand(edx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
  Register object = WriteBarrierDescriptor::ObjectRegister();
  __ mov(object, edx);
  __ RecordWriteField(object, JSGeneratorObject::kInputOrDebugPosOffset, eax,
                      WriteBarrierDescriptor::SlotAddressRegister(),
                      SaveFPRegsMode::kIgnore);
  // Check that edx is still valid, RecordWrite might have clobbered it.
  __ AssertGeneratorObject(edx);

  // Load suspended function and context.
  __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ cmpb(__ ExternalReferenceAsOperand(debug_hook, ecx), Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ cmp(edx, __ ExternalReferenceAsOperand(debug_suspended_generator, ecx));
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ CompareStackLimit(esp, StackLimitKind::kRealStackLimit);
  __ j(below, &stack_overflow);

  // Pop return address.
  __ PopReturnAddressTo(eax);

  // ----------- S t a t e -------------
  //  -- eax    : return address
  //  -- edx    : the JSGeneratorObject to resume
  //  -- edi    : generator function
  //  -- esi    : generator context
  // -----------------------------------

  {
    __ movd(xmm0, ebx);

    // Copy the function arguments from the generator object's register file.
    // TODO(olivf, 40931165): Load the parameter count from the JSDispatchTable.
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(ecx, FieldOperand(
                        ecx, SharedFunctionInfo::kFormalParameterCountOffset));
    __ dec(ecx);  // Exclude receiver.
    __ mov(ebx,
           FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ dec(ecx);
      __ j(less, &done_loop);
      __ Push(FieldOperand(ebx, ecx, times_tagged_size,
                           OFFSET_OF_DATA_START(FixedArray)));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Push receiver.
    __ Push(FieldOperand(edx, JSGeneratorObject::kReceiverOffset));

    // Restore registers.
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
    __ movd(ebx, xmm0);
  }

  // The underlying function needs to have bytecode available.
  if (v8_flags.debug_code) {
    Label is_baseline, is_unavailable, ok;
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ Push(eax);
    GetSharedFunctionInfoBytecodeOrBaseline(masm, ecx, ecx, eax, &is_baseline,
                                            &is_unavailable);
    __ Pop(eax);
    __ jmp(&ok);

    __ bind(&is_unavailable);
    __ Abort(AbortReason::kMissingBytecodeArray);

    __ bind(&is_baseline);
    __ Pop(eax);
    __ CmpObjectType(ecx, CODE_TYPE, ecx);
    __ Assert(equal, AbortReason::kMissingBytecodeArray);

    __ bind(&ok);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(eax);
    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(eax, FieldOperand(
                        eax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ JumpJSFunction(edi);
  }
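
  // Illustrative JS-level view of the resume protocol (a sketch, not code
  // from this file): for
  //   function* g() { const x = yield 1; }
  // calling g().next(42) eventually reaches this trampoline with edx holding
  // the JSGeneratorObject and 42 already stored as its input value; passing
  // the generator object as new.target is unambiguous because generator
  // functions are non-constructable and never see a real new.target.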

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edx);
    __ Push(edi);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(edx);
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(edx);
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  ASM_CODE_COMMENT(masm);
  Register params_size = scratch1;
  // Get the size of the formal parameters (in bytes).
  __ mov(params_size,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movzx_w(params_size,
             FieldOperand(params_size, BytecodeArray::kParameterSizeOffset));

  Register actual_params_size = scratch2;
  // Compute the size of the actual parameters (in bytes).
  __ mov(actual_params_size, Operand(ebp, StandardFrameConstants::kArgCOffset));

  // If the actual size is bigger than the formal size, use it to free up the
  // stack arguments.
  __ cmp(params_size, actual_params_size);
  __ cmov(kLessThan, params_size, actual_params_size);

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ DropArguments(params_size, scratch2);
}
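
// A worked example of the max() above (illustrative): if a function declares
// 2 formal parameters but is called with 5 actual arguments, the cmov keeps
// the larger value in params_size, so DropArguments removes all 5 arguments
// plus the receiver instead of leaking the surplus arguments on the stack.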

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode. Will not advance
// the bytecode offset if the current bytecode is a JumpLoop, instead just
// re-executing the JumpLoop to jump to the correct bytecode.
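//
// Example of the prefix handling below (a sketch): for the byte sequence
//   [kWide, kLdar, <16-bit register operand>]
// the first load sees kWide (value 0), the offset is incremented past the
// prefix, the size table pointer is advanced to the wide-scaled table, and the
// re-loaded kLdar is then looked up there. A wide or extra-wide kJumpLoop
// instead restores original_bytecode_offset so the whole prefixed bytecode is
// re-executed.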
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register scratch1, Register scratch2,
                                          Register scratch3, Label* if_return) {
  ASM_CODE_COMMENT(masm);
  Register bytecode_size_table = scratch1;
  Register bytecode = scratch2;

  // The bytecode offset value will be increased by one in wide and extra wide
  // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
  // will restore the original bytecode. In order to simplify the code, we keep
  // a backup of it.
  Register original_bytecode_offset = scratch3;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode, original_bytecode_offset));
  __ Move(bytecode_size_table,
          Immediate(ExternalReference::bytecode_size_table_address()));

  // Load the current bytecode.
  __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ Move(original_bytecode_offset, bytecode_offset);

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
  static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  static_assert(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmp(bytecode, Immediate(0x3));
  __ j(above, &process_bytecode, Label::kNear);
  // The code to load the next bytecode is common to both wide and extra wide.
  // We can hoist it up here. The inc has to happen before the test, since inc
  // modifies the ZF flag that the branch below consumes.
  __ inc(bytecode_offset);
  __ test(bytecode, Immediate(0x1));
  __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ j(not_equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update the table to the wide scaled table.
  __ add(bytecode_size_table,
         Immediate(kByteSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode, Label::kNear);

  __ bind(&extra_wide);
  // Update the table to the extra wide scaled table.
  __ add(bytecode_size_table,
         Immediate(2 * kByteSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bail out to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                            \
  __ cmp(bytecode,                                                     \
         Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ j(equal, if_return);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // If this is a JumpLoop, re-execute it to perform the jump to the beginning
  // of the loop.
  Label end, not_jump_loop;
  __ cmp(bytecode,
         Immediate(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
  __ j(not_equal, &not_jump_loop, Label::kNear);
  // If this is a wide or extra wide JumpLoop, we need to restore the original
  // bytecode_offset since we might have increased it to skip the wide /
  // extra-wide prefix bytecode.
  __ Move(bytecode_offset, original_bytecode_offset);
  __ jmp(&end, Label::kNear);

  __ bind(&not_jump_loop);
  // Otherwise, load the size of the current bytecode and advance the offset.
  __ movzx_b(bytecode_size_table,
             Operand(bytecode_size_table, bytecode, times_1, 0));
  __ add(bytecode_offset, bytecode_size_table);

  __ bind(&end);
}

namespace {

void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi) {
  __ mov_w(FieldOperand(sfi, SharedFunctionInfo::kAgeOffset), Immediate(0));
}

void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
                        Register scratch) {
  const Register shared_function_info(scratch);
  __ Move(shared_function_info,
          FieldOperand(js_function, JSFunction::kSharedFunctionInfoOffset));
  ResetSharedFunctionInfoAge(masm, shared_function_info);
}

void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
                                   Register feedback_vector, Register scratch) {
  __ mov_b(scratch,
           FieldOperand(feedback_vector, FeedbackVector::kOsrStateOffset));
  __ and_(scratch, Immediate(~FeedbackVector::OsrUrgencyBits::kMask));
  __ mov_b(FieldOperand(feedback_vector, FeedbackVector::kOsrStateOffset),
           scratch);
}

}  // namespace

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.
//
// The live registers are:
//   o eax: actual argument count
//   o edi: the JS function object being called
//   o edx: the incoming new target or generator object
//   o esi: our context
//   o ebp: the caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frame-constants.h for its layout.
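//
// A sketch of the frame this function builds (illustrative; see
// InterpreterFrameConstants for the authoritative layout):
//   ebp[+4]  : return address       ebp[0]   : caller's ebp
//   ebp[-4]  : context              ebp[-8]  : JS function (closure)
//   ebp[-12] : actual argc          ebp[-16] : bytecode array
//   ebp[-20] : bytecode offset (Smi-tagged)   ebp[-24] : feedback vector
//   below    : the register file, pre-filled with undefined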
void Builtins::Generate_InterpreterEntryTrampoline(
    MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
  __ movd(xmm0, eax);  // Spill actual argument count.

  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));

  // The bytecode array could have been flushed from the shared function info;
  // if so, call into CompileLazy.
  Label is_baseline, compile_lazy;
  GetSharedFunctionInfoBytecodeOrBaseline(masm, ecx, ecx, eax, &is_baseline,
                                          &compile_lazy);

  Label push_stack_frame;
  Register feedback_vector = ecx;
  Register closure = edi;
  Register scratch = eax;
  __ LoadFeedbackVector(feedback_vector, closure, scratch, &push_stack_frame,
                        Label::kFar);

#ifndef V8_JITLESS
#ifndef V8_ENABLE_LEAPTIERING
  // If the feedback vector is valid, check for optimized code and update the
  // invocation count. Load the optimization state from the feedback vector and
  // reuse the register.
  Label flags_need_processing;
  Register flags = ecx;
  XMMRegister saved_feedback_vector = xmm1;
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, saved_feedback_vector, CodeKind::INTERPRETED_FUNCTION,
      &flags_need_processing);

  // Reload the feedback vector.
  __ movd(feedback_vector, saved_feedback_vector);
#endif  // !V8_ENABLE_LEAPTIERING

  ResetFeedbackVectorOsrUrgency(masm, feedback_vector, scratch);

  // Increment the invocation count.
  __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
#else
  // Note: By omitting the above code in jitless mode we also disable:
  // - kFlagsLogNextExecution: only used for logging/profiling; and
  // - kInvocationCountOffset: only used for tiering heuristics and code
  //   coverage.
#endif  // !V8_JITLESS

  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(kContextRegister);               // Callee's context.
  __ push(kJavaScriptCallTargetRegister);  // Callee's JS function.
  __ movd(kJavaScriptCallArgCountRegister, xmm0);
  __ push(kJavaScriptCallArgCountRegister);  // Actual argument count.

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  ResetSharedFunctionInfoAge(masm, eax);
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(eax, SharedFunctionInfo::kTrustedFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);

  // Check that the function data field is actually a BytecodeArray object.
  if (v8_flags.debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     eax);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push the bytecode array.
  __ push(kInterpreterBytecodeArrayRegister);
  // Push the Smi-tagged initial bytecode array offset.
  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));
  __ push(feedback_vector);

  // Allocate the local and temporary register file on the stack.
  Label stack_overflow;
  {
    // Load the frame size from the BytecodeArray object.
    Register frame_size = ecx;
    __ mov(frame_size, FieldOperand(kInterpreterBytecodeArrayRegister,
                                    BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    __ mov(eax, esp);
    __ sub(eax, frame_size);
    __ CompareStackLimit(eax, StackLimitKind::kRealStackLimit);
    __ j(below, &stack_overflow);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
    __ jmp(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(kInterpreterAccumulatorRegister);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(frame_size, Immediate(kSystemPointerSize));
    __ j(greater_equal, &loop_header);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value which was passed in edx.
  Label no_incoming_new_target_or_generator_register;
  __ mov(ecx, FieldOperand(
                  kInterpreterBytecodeArrayRegister,
                  BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ test(ecx, ecx);
  __ j(zero, &no_incoming_new_target_or_generator_register);
  __ mov(Operand(ebp, ecx, times_system_pointer_size, 0), edx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Perform an interrupt stack check.
  // TODO(solanes): Merge with the real stack limit check above.
  Label stack_check_interrupt, after_stack_check_interrupt;
  __ CompareStackLimit(esp, StackLimitKind::kInterruptStackLimit);
  __ j(below, &stack_check_interrupt);
  __ bind(&after_stack_check_interrupt);

  // The accumulator is already loaded with undefined.

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(kInterpreterDispatchTableRegister,
          Immediate(ExternalReference::interpreter_dispatch_table_address(
              masm->isolate())));
  __ movzx_b(ecx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(kJavaScriptCallCodeStartRegister,
         Operand(kInterpreterDispatchTableRegister, ecx,
                 times_system_pointer_size, 0));
  __ call(kJavaScriptCallCodeStartRegister);

  __ RecordComment("--- InterpreterEntryReturnPC point ---");
  if (mode == InterpreterEntryTrampolineMode::kDefault) {
    masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
        masm->pc_offset());
  } else {
    DCHECK_EQ(mode, InterpreterEntryTrampolineMode::kForProfiling);
    // Both versions must be the same up to this point, otherwise the builtins
    // will not be interchangeable.
    CHECK_EQ(
        masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
        masm->pc_offset());
  }

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get the bytecode array and bytecode offset from the stack frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ Push(eax);
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, ecx,
                                kInterpreterDispatchTableRegister, eax,
                                &do_return);
  __ Pop(eax);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  __ Pop(eax);
  // The return value is in eax.
  LeaveInterpreterFrame(masm, edx, ecx);
  __ ret(0);

  __ bind(&stack_check_interrupt);
  // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
  // for the call to the StackGuard.
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
         Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
                                kFunctionEntryBytecodeOffset)));
  __ CallRuntime(Runtime::kStackGuard);

  // After the call, restore the bytecode array, bytecode offset and
  // accumulator registers again. Also, restore the bytecode offset in the
  // stack to its previous value.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // It's ok to clobber kInterpreterBytecodeOffsetRegister since we are setting
  // it again after continuing.
  __ SmiTag(kInterpreterBytecodeOffsetRegister);
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
         kInterpreterBytecodeOffsetRegister);

  __ jmp(&after_stack_check_interrupt);

#ifndef V8_JITLESS
#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);
  {
    // Restore actual argument count.
    __ movd(eax, xmm0);
    __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, xmm1);
  }
#endif  // !V8_ENABLE_LEAPTIERING

  __ bind(&compile_lazy);
  // Restore actual argument count.
  __ movd(eax, xmm0);
  __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);

  __ bind(&is_baseline);
  {
#ifndef V8_ENABLE_LEAPTIERING
    __ movd(xmm2, ecx);  // Save baseline data.
    // Load the feedback vector from the closure.
    __ mov(feedback_vector,
           FieldOperand(closure, JSFunction::kFeedbackCellOffset));
    __ mov(feedback_vector,
           FieldOperand(feedback_vector, FeedbackCell::kValueOffset));

    Label install_baseline_code;
    // Check if feedback vector is valid. If not, call prepare for baseline to
    // allocate it.
    __ LoadMap(eax, feedback_vector);
    __ CmpInstanceType(eax, FEEDBACK_VECTOR_TYPE);
    __ j(not_equal, &install_baseline_code);

    // Check the tiering state.
    __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
        flags, xmm1, CodeKind::BASELINE, &flags_need_processing);

    // Load the baseline code into the closure.
    __ movd(ecx, xmm2);
    static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
    __ push(edx);  // Spill.
    __ push(ecx);
    __ Push(xmm0, eax);  // Save the argument count (currently in xmm0).
    __ ReplaceClosureCodeWithOptimizedCode(ecx, closure, eax, ecx);
    __ pop(eax);  // Restore the argument count.
    __ pop(ecx);
    __ pop(edx);
    __ JumpCodeObject(ecx);

    __ bind(&install_baseline_code);
#endif  // !V8_ENABLE_LEAPTIERING

    __ movd(eax, xmm0);  // Recover argument count.
    __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
  }
#endif  // !V8_JITLESS

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  __ int3();  // Should not return.
}

static void GenerateInterpreterPushArgs(MacroAssembler* masm,
                                        Register array_limit,
                                        Register start_address) {
  // ----------- S t a t e -------------
  //  -- start_address : Pointer to the last argument in the args array.
  //  -- array_limit   : Pointer to one before the first argument in the
  //                     args array.
  // -----------------------------------
  ASM_CODE_COMMENT(masm);
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ Push(Operand(array_limit, 0));
  __ bind(&loop_check);
  __ add(array_limit, Immediate(kSystemPointerSize));
  __ cmp(array_limit, start_address);
  __ j(below_equal, &loop_header, Label::kNear);
}

// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments
  //  -- ecx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------

  const Register scratch = edx;
  const Register argv = ecx;

  Label stack_overflow;
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // The spread argument should not be pushed.
    __ dec(eax);
  }

  // Add a stack check before pushing the arguments.
  __ StackOverflowCheck(eax, scratch, &stack_overflow, true);
  __ movd(xmm0, eax);  // Spill number of arguments.

  // Compute the expected number of arguments.
  __ mov(scratch, eax);
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ dec(scratch);  // Exclude receiver.
  }

  // Pop the return address to allow tail-calling after pushing the arguments.
  __ PopReturnAddressTo(eax);

  // Find the address of the last argument.
  __ shl(scratch, kSystemPointerSizeLog2);
  __ neg(scratch);
  __ add(scratch, argv);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ movd(xmm1, scratch);
    GenerateInterpreterPushArgs(masm, scratch, argv);
    // Pass the spread in the register ecx.
    __ movd(ecx, xmm1);
    __ mov(ecx, Operand(ecx, 0));
  } else {
    GenerateInterpreterPushArgs(masm, scratch, argv);
  }

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
  }

  __ PushReturnAddressFrom(eax);
  __ movd(eax, xmm0);  // Restore number of arguments.

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ TailCallBuiltin(Builtin::kCallWithSpread);
  } else {
    __ TailCallBuiltin(Builtins::Call(receiver_mode));
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}

namespace {

// This function modifies start_addr, and only reads the contents of the
// num_args register. scratch1 and scratch2 are used as temporary registers.
void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
    MacroAssembler* masm, Register num_args, Register start_addr,
    Register scratch1, Register scratch2, int num_slots_to_move,
    Label* stack_overflow) {
  // We have to move the return address and the temporary registers above it
  // before we can copy arguments onto the stack. To achieve this:
  // Step 1: Grow the stack by num_args + 1 for the receiver (if it is not
  //         included in argc already).
  // Step 2: Move the return address and the values around it to the top of
  //         the stack.
  // Step 3: Copy the arguments into the correct locations.
  //
  //  current stack    =====>    required stack layout
  // |             |            | return addr   | (2) <-- esp (1)
  // |             |            | addtl. slot   |
  // |             |            | arg N         | (3)
  // |             |            | ....          |
  // |             |            | arg 1         |
  // | return addr | <-- esp    | arg 0         |
  // | addtl. slot |            | receiver slot |
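  //
  // Worked example (illustrative): with num_args == 3 (receiver included) and
  // num_slots_to_move == 1, three new slots are allocated; the return address
  // and the slot above it move down to the new stack top; the two real
  // arguments are copied into their final slots; and the receiver slot is
  // zeroed so a stale return address is never misread as a tagged object.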

  // Check for stack overflow before we increment the stack pointer.
  __ StackOverflowCheck(num_args, scratch1, stack_overflow, true);

  // Step 1 - Update the stack pointer.

  __ lea(scratch1, Operand(num_args, times_system_pointer_size, 0));
  __ AllocateStackSpace(scratch1);

  // Step 2 - Move the return address and the slots around it to the correct
  // locations. Move from top to bottom, otherwise we may overwrite when
  // num_args = 0 or 1, basically when the source and destination overlap. We
  // at least need one extra slot for the receiver, so no extra checks are
  // required to avoid the copy overlapping itself.
  for (int i = 0; i < num_slots_to_move + 1; i++) {
    __ mov(scratch1, Operand(esp, num_args, times_system_pointer_size,
                             i * kSystemPointerSize));
    __ mov(Operand(esp, i * kSystemPointerSize), scratch1);
  }

  // Step 3 - Copy the arguments into the correct locations.
  // The slot meant for the receiver contains the return address. Reset it so
  // that we will not incorrectly interpret the return address as an object.
  __ mov(Operand(esp, (num_slots_to_move + 1) * kSystemPointerSize),
         Immediate(0));
  __ mov(scratch1, Immediate(0));

  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ mov(scratch2, Operand(start_addr, 0));
  __ mov(Operand(esp, scratch1, times_system_pointer_size,
                 (num_slots_to_move + 1) * kSystemPointerSize),
         scratch2);
  __ sub(start_addr, Immediate(kSystemPointerSize));
  __ bind(&loop_check);
  __ inc(scratch1);
  __ cmp(scratch1, eax);
  __ j(less, &loop_header, Label::kNear);
}

}  // anonymous namespace

// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- eax     : the number of arguments
  //  -- ecx     : the address of the first argument to be pushed. Subsequent
  //               arguments should be consecutive above this, in the same order
  //               as they are to be pushed onto the stack.
  //  -- esp[0]  : return address
  //  -- esp[4]  : allocation site feedback (if available or undefined)
  //  -- esp[8]  : the new target
  //  -- esp[12] : the constructor
  // -----------------------------------
  Label stack_overflow;

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // The spread argument should not be pushed.
    __ dec(eax);
  }

  // Push arguments and move the return address and stack spill slots to the
  // top of the stack. The eax register is read-only. The ecx register will be
  // modified. edx and edi are used as scratch registers.
  Generate_InterpreterPushZeroAndArgsAndReturnAddress(
      masm, eax, ecx, edx, edi,
      /* num_slots_to_move = */ 3,  // Allocation site, new target, constructor.
      &stack_overflow);

  // Call the appropriate constructor. eax and ecx already contain the intended
  // values; the remaining registers still need to be initialized from the
  // stack.

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    // Tail call to the array construct stub (still in the caller context at
    // this point).

    __ movd(xmm0, eax);  // Spill number of arguments.
    __ PopReturnAddressTo(eax);
    __ Pop(kJavaScriptCallExtraArg1Register);
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ PushReturnAddressFrom(eax);

    __ AssertFunction(kJavaScriptCallTargetRegister, eax);
    __ AssertUndefinedOrAllocationSite(kJavaScriptCallExtraArg1Register, eax);

    __ movd(eax, xmm0);  // Reload number of arguments.
    __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ movd(xmm0, eax);  // Spill number of arguments.
    __ PopReturnAddressTo(eax);
    __ Drop(1);  // The allocation site is unused.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    // Pass the spread in the register ecx, overwriting ecx.
    __ mov(ecx, Operand(ecx, 0));
    __ PushReturnAddressFrom(eax);
    __ movd(eax, xmm0);  // Reload number of arguments.
    __ TailCallBuiltin(Builtin::kConstructWithSpread);
  } else {
    DCHECK_EQ(mode, InterpreterPushArgsMode::kOther);
    __ PopReturnAddressTo(ecx);
    __ Drop(1);  // The allocation site is unused.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ PushReturnAddressFrom(ecx);

    __ TailCallBuiltin(Builtin::kConstruct);
  }

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ int3();
}

namespace {
void LoadFramePointer(MacroAssembler* masm, Register to,
                      Builtins::ForwardWhichFrame which_frame) {
  switch (which_frame) {
    case Builtins::ForwardWhichFrame::kCurrentFrame:
      __ mov(to, ebp);
      break;
    case Builtins::ForwardWhichFrame::kParentFrame:
      __ mov(to, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
      break;
  }
}
}  // namespace

// static
void Builtins::Generate_ConstructForwardAllArgsImpl(
    MacroAssembler* masm, ForwardWhichFrame which_frame) {
  // ----------- S t a t e -------------
  //  -- edx    : the new target
  //  -- edi    : the constructor
  //  -- esp[0] : return address
  // -----------------------------------
  Label stack_overflow;

  // Load the frame pointer into ecx.
  LoadFramePointer(masm, ecx, which_frame);

  // Load the argument count into eax.
  __ mov(eax, Operand(ecx, StandardFrameConstants::kArgCOffset));

  // The following stack surgery is performed to forward arguments from the
  // interpreted frame.
  //
  //  current stack    =====>    required stack layout
  // |             |            | saved new target  | (2)
  // |             |            | saved constructor | (2)
  // |             |            | return addr       | (3) <-- esp (1)
  // |             |            | arg N             | (5)
  // |             |            | ....              | (5)
  // |             |            | arg 0             | (5)
  // | return addr | <-- esp    | 0 (receiver)      | (4)
  //
  // The saved new target and constructor are popped to their respective
  // registers before calling the Construct builtin.
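  //
  // Worked example (illustrative): with an argument count of 3 (receiver
  // included), the stack grows by three slots (step 1); the constructor and
  // new target are spilled (step 2); the return address moves to the new top
  // (step 3); a 0 receiver placeholder is written (step 4); and the two real
  // arguments are copied down from the source frame (step 5) before the saved
  // registers are popped back for the tail call to Construct.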

  // Step 1
  //
  // Update the stack pointer, using ecx as a scratch register.
  __ StackOverflowCheck(eax, ecx, &stack_overflow, true);
  __ lea(ecx, Operand(eax, times_system_pointer_size, 0));
  __ AllocateStackSpace(ecx);

  // Step 2
  //
  // Save the new target and constructor on the stack so they can be used as
  // scratch registers.
  __ Push(edi);
  __ Push(edx);

  // Step 3
  //
  // Move the return address. Stack address computations have to be offset by
  // the saved constructor and new target on the stack.
  constexpr int spilledConstructorAndNewTargetOffset = 2 * kSystemPointerSize;
  __ mov(edx, Operand(esp, eax, times_system_pointer_size,
                      spilledConstructorAndNewTargetOffset));
  __ mov(Operand(esp, spilledConstructorAndNewTargetOffset), edx);

  // Step 4
  // Push a 0 for the receiver to be allocated.
  __ mov(
      Operand(esp, kSystemPointerSize + spilledConstructorAndNewTargetOffset),
      Immediate(0));

  // Step 5
  //
  // Forward the arguments from the frame.

  // First reload the frame pointer into ecx.
  LoadFramePointer(masm, ecx, which_frame);

  // Point ecx to the base of the arguments, excluding the receiver.
  __ add(ecx, Immediate((StandardFrameConstants::kFixedSlotCountAboveFp + 1) *
                        kSystemPointerSize));
  {
    // Copy the arguments.
    Register counter = edx;
    Register scratch = edi;

    Label loop, entry;
    __ mov(counter, eax);
    __ jmp(&entry);
    __ bind(&loop);
    // The source frame's argument is offset by -kSystemPointerSize because the
    // counter was initialized from an argument count that includes the
    // receiver.
    __ mov(scratch, Operand(ecx, counter, times_system_pointer_size,
                            -kSystemPointerSize));
    // Similarly, the target frame's argument is offset by +kSystemPointerSize
    // because we pushed a 0 for the receiver to be allocated.
    __ mov(Operand(esp, counter, times_system_pointer_size,
                   kSystemPointerSize + spilledConstructorAndNewTargetOffset),
           scratch);
    __ bind(&entry);
    __ dec(counter);
    __ j(greater_equal, &loop, Label::kNear);
  }

  // Pop the saved constructor and new target, then call the appropriate
  // constructor. eax already contains the argument count.
  __ Pop(kJavaScriptCallNewTargetRegister);
  __ Pop(kJavaScriptCallTargetRegister);
  __ TailCallBuiltin(Builtin::kConstruct);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}

namespace {

void NewImplicitReceiver(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argument count
  //  -- edi : constructor to call
  //  -- edx : new target (checked to be a JSFunction)
  //
  // Stack:
  //  -- Implicit Receiver
  //  -- [arguments without receiver]
  //  -- Implicit Receiver
  //  -- Context
  //  -- FastConstructMarker
  //  -- FramePointer

  Register implicit_receiver = ecx;

  // Save live registers.
  __ SmiTag(eax);
  __ Push(eax);  // Number of arguments
  __ Push(edx);  // NewTarget
  __ Push(edi);  // Target
  __ CallBuiltin(Builtin::kFastNewObject);
  // Save result.
  __ mov(implicit_receiver, eax);
  // Restore live registers.
  __ Pop(edi);
  __ Pop(edx);
  __ Pop(eax);
  __ SmiUntag(eax);

  // Patch the implicit receiver (in the arguments).
  __ mov(Operand(esp, 0 /* first argument */), implicit_receiver);
  // Patch the second implicit receiver (in the construct frame).
  __ mov(Operand(ebp, FastConstructFrameConstants::kImplicitReceiverOffset),
         implicit_receiver);

  // Restore context.
  __ mov(esi, Operand(ebp, FastConstructFrameConstants::kContextOffset));
}

}  // namespace
1560
1561// static
1562void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
1563 MacroAssembler* masm) {
1564 // ----------- S t a t e -------------
1565 // -- eax : the number of arguments
1566 // -- ecx : the address of the first argument to be pushed. Subsequent
1567 // arguments should be consecutive above this, in the same order
1568 // as they are to be pushed onto the stack.
1569 // -- esi : the context
1570 // -- esp[0] : return address
1571 // -- esp[4] : allocation site feedback (if available or undefined)
1572 // -- esp[8] : the new target
1573 // -- esp[12] : the constructor (checked to be a JSFunction)
1574 // -----------------------------------
1575
1576 // Load constructor.
1577 __ mov(edi, Operand(esp, 3 * kSystemPointerSize));
1578 __ AssertFunction(edi, edx);
1579
1580 // Check if target has a [[Construct]] internal method.
1581 Label non_constructor;
1582 // Load constructor.
1583 __ LoadMap(edx, edi);
1584 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
1585 Immediate(Map::Bits1::IsConstructorBit::kMask));
1586 __ j(zero, &non_constructor);
1587
1588 // Add a stack check before pushing arguments.
1589 Label stack_overflow;
1590 __ StackOverflowCheck(eax, edx, &stack_overflow, true);
1591
1592 // Spill number of arguments.
1593 __ movd(xmm0, eax);
1594
1595 // Load NewTarget.
1596 __ mov(edx, Operand(esp, 2 * kSystemPointerSize));
1597
1598 // Drop stub arguments from the stack.
1599 __ PopReturnAddressTo(eax);
1600 __ Drop(3); // The allocation site is unused.
1601 __ PushReturnAddressFrom(eax);
1602
1603 // Enter a construct frame.
1604 FrameScope scope(masm, StackFrame::MANUAL);
1605 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
1606 __ Push(esi);
1607 // Implicit receiver stored in the construct frame.
1608 __ PushRoot(RootIndex::kTheHoleValue);
1609
1610 // Push arguments + implicit receiver
1611 __ movd(eax, xmm0); // Recover number of arguments.
1612 // Find the address of the last argument.
1613 __ lea(esi, Operand(eax, times_system_pointer_size,
1615 __ neg(esi);
1616 __ add(esi, ecx);
1617 GenerateInterpreterPushArgs(masm, esi, ecx);
1618 __ PushRoot(RootIndex::kTheHoleValue);
1619
1620 // Restore context.
1621 __ mov(esi, Operand(ebp, FastConstructFrameConstants::kContextOffset));
1622
1623 // Check if it is a builtin call.
1624 Label builtin_call;
1625 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1626 __ test(FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset),
1627 Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1628 __ j(not_zero, &builtin_call);
1629
1630 // Check if we need to create an implicit receiver.
1631 Label not_create_implicit_receiver;
1632 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset));
1633 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(ecx);
1634 __ JumpIfIsInRange(
1635 ecx, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
1636 static_cast<uint32_t>(FunctionKind::kDerivedConstructor), ecx,
1637 &not_create_implicit_receiver, Label::kNear);
1638 NewImplicitReceiver(masm);
1639 __ bind(&not_create_implicit_receiver);
1640
1641 // Call the constructor.
1642 __ InvokeFunction(edi, edx, eax, InvokeType::kCall);
1643
1644 // ----------- S t a t e -------------
1645 // -- eax constructor result
1646 //
1647 // Stack:
1648 // -- Implicit Receiver
1649 // -- Context
1650 // -- FastConstructMarker
1651 // -- FramePointer
1652 // -----------------------------------
1653
1654 // Store offset of return address for deoptimizer.
1655 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
1656 masm->pc_offset());
1657
1658 // If the result is an object (in the ECMA sense), we should get rid
1659 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
1660 // on page 74.
1661
1662 Label check_result, use_receiver, do_throw, leave_and_return;
1663 // If the result is undefined, we jump out to using the implicit receiver.
1664 __ JumpIfNotRoot(eax, RootIndex::kUndefinedValue, &check_result,
1665 Label::kNear);
1666
1667 // Throw away the result of the constructor invocation and use the
1668 // on-stack receiver as the result.
1669 __ bind(&use_receiver);
1670 __ mov(eax, Operand(esp, 0 * kSystemPointerSize));
1671 __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);
1672
1673 __ bind(&leave_and_return);
1674 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1675 __ ret(0);
1676
1677 // Otherwise we do a smi check and fall through to check if the return value
1678 // is a valid receiver.
1679 __ bind(&check_result);
1680
1681 // If the result is a smi, it is *not* an object in the ECMA sense.
1682 __ JumpIfSmi(eax, &use_receiver, Label::kNear);
1683
1684 // If the type of the result (stored in its map) is less than
1685 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
1686 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1687 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
1688 __ j(above_equal, &leave_and_return, Label::kNear);
1689 __ jmp(&use_receiver, Label::kNear);
1690
1691 __ bind(&do_throw);
1692 // Restore context from the frame.
1693 __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
1694 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
1695 // This should be unreachable.
1696 __ int3();
1697
1698 __ bind(&builtin_call);
1699 __ InvokeFunction(edi, edx, eax, InvokeType::kCall);
1700 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1701 __ ret(0);
1702
1703 // Called Construct on an Object that doesn't have a [[Construct]] internal
1704 // method.
1705 __ bind(&non_constructor);
1706 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
1707
1708 // Throw stack overflow exception.
1709 __ bind(&stack_overflow);
1710 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1711 // This should be unreachable.
1712 __ int3();
1713}
1714
1715static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1716 // Set the return address to the correct point in the interpreter entry
1717 // trampoline.
1718 Label builtin_trampoline, trampoline_loaded;
1719 Tagged<Smi> interpreter_entry_return_pc_offset(
1720 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1721 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1722
1723 static constexpr Register scratch = ecx;
1724
1725 // If the SFI function_data is an InterpreterData, the function will have a
1726 // custom copy of the interpreter entry trampoline for profiling. If so,
1727 // get the custom trampoline, otherwise grab the entry address of the global
1728 // trampoline.
1729 __ mov(scratch, Operand(ebp, StandardFrameConstants::kFunctionOffset));
1730 __ mov(scratch, FieldOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
1731 __ mov(scratch,
1732 FieldOperand(scratch, SharedFunctionInfo::kTrustedFunctionDataOffset));
1733 __ Push(eax);
1734 __ CmpObjectType(scratch, INTERPRETER_DATA_TYPE, eax);
1735 __ j(not_equal, &builtin_trampoline, Label::kNear);
1736
1737 __ mov(scratch,
1738 FieldOperand(scratch, InterpreterData::kInterpreterTrampolineOffset));
1739 __ LoadCodeInstructionStart(scratch, scratch);
1740 __ jmp(&trampoline_loaded, Label::kNear);
1741
1742 __ bind(&builtin_trampoline);
1743 __ mov(scratch,
1744 __ ExternalReferenceAsOperand(
1745 ExternalReference::
1746 address_of_interpreter_entry_trampoline_instruction_start(
1747 masm->isolate()),
1748 scratch));
1749
1750 __ bind(&trampoline_loaded);
1751 __ Pop(eax);
1752 __ add(scratch, Immediate(interpreter_entry_return_pc_offset.value()));
1753 __ push(scratch);
1754
1755 // Initialize the dispatch table register.
1756 __ Move(kInterpreterDispatchTableRegister,
1757 Immediate(ExternalReference::interpreter_dispatch_table_address(
1758 masm->isolate())));
1759
1760 // Get the bytecode array pointer from the frame.
1761 __ mov(kInterpreterBytecodeArrayRegister,
1762 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1763 
1764 if (v8_flags.debug_code) {
1765 // Check that the function data field is actually a BytecodeArray object.
1766 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
1767 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1768 scratch);
1769 __ Assert(
1770 equal,
1771 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1772 }
1773
1774 // Get the target bytecode offset from the frame.
1775 __ mov(kInterpreterBytecodeOffsetRegister,
1776 Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1777 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1778 
1779 if (v8_flags.debug_code) {
1780 Label okay;
1781 __ cmp(kInterpreterBytecodeOffsetRegister,
1782 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
1783 __ j(greater_equal, &okay, Label::kNear);
1784 __ int3();
1785 __ bind(&okay);
1786 }
1787
1788 // Dispatch to the target bytecode.
1789 __ movzx_b(scratch, Operand(kInterpreterBytecodeArrayRegister,
1790 kInterpreterBytecodeOffsetRegister, times_1, 0));
1791 __ mov(kJavaScriptCallCodeStartRegister,
1792 Operand(kInterpreterDispatchTableRegister, scratch,
1793 times_system_pointer_size, 0));
1794 __ jmp(kJavaScriptCallCodeStartRegister);
1795 }
1796
1797void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
1798 // Get bytecode array and bytecode offset from the stack frame.
1799 __ mov(kInterpreterBytecodeArrayRegister,
1800 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1801 __ mov(kInterpreterBytecodeOffsetRegister,
1802 Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1803 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1804 
1805 Label enter_bytecode, function_entry_bytecode;
1806 __ cmp(kInterpreterBytecodeOffsetRegister,
1807 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag +
1808 kFunctionEntryBytecodeOffset));
1809 __ j(equal, &function_entry_bytecode);
1810
1811 // Advance to the next bytecode.
1812 Label if_return;
1813 __ Push(eax);
1814 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1815 kInterpreterBytecodeOffsetRegister, ecx, esi,
1816 eax, &if_return);
1817 __ Pop(eax);
1818
1819 __ bind(&enter_bytecode);
1820 // Convert the new bytecode offset to a Smi and save it in the stack frame.
1821 __ mov(ecx, kInterpreterBytecodeOffsetRegister);
1822 __ SmiTag(ecx);
1823 __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ecx);
1824 
1825 Generate_InterpreterEnterBytecode(masm);
1826 
1827 __ bind(&function_entry_bytecode);
1828 // If the code deoptimizes during the implicit function entry stack interrupt
1829 // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
1830 // not a valid bytecode offset. Detect this case and advance to the first
1831 // actual bytecode.
1832 __ mov(kInterpreterBytecodeOffsetRegister,
1833 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
1834 __ jmp(&enter_bytecode);
1835
1836 // We should never take the if_return path.
1837 __ bind(&if_return);
1838 // No need to pop eax here since we will be aborting anyway.
1839 __ Abort(AbortReason::kInvalidBytecodeAdvance);
1840}
1841
1842 void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
1843 Generate_InterpreterEnterBytecode(masm);
1844}
1845
1846// static
1847void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
1848 auto descriptor =
1849 Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
1850 Register arg_count = descriptor.GetRegisterParameter(
1851 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
1852 Register frame_size = descriptor.GetRegisterParameter(
1853 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
1854
1855 // Save argument count and bytecode array.
1856 XMMRegister saved_arg_count = xmm0;
1857 XMMRegister saved_bytecode_array = xmm1;
1858 XMMRegister saved_frame_size = xmm2;
1859 XMMRegister saved_feedback_cell = xmm3;
1860 XMMRegister saved_feedback_vector = xmm4;
1861 __ movd(saved_arg_count, arg_count);
1862 __ movd(saved_frame_size, frame_size);
1863
1864 // Use the arg count (eax) as the scratch register.
1865 Register scratch = arg_count;
1866
1867 // Load the feedback cell and vector from the closure.
1868 Register closure = descriptor.GetRegisterParameter(
1869 BaselineOutOfLinePrologueDescriptor::kClosure);
1870 Register feedback_cell = ecx;
1871 __ mov(feedback_cell, FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1872 __ movd(saved_feedback_cell, feedback_cell);
1873 Register feedback_vector = ecx;
1874 __ mov(feedback_vector,
1875 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
1876 __ AssertFeedbackVector(feedback_vector, scratch);
1877 feedback_cell = no_reg;
1878
1879#ifdef V8_ENABLE_LEAPTIERING
1880 __ movd(saved_feedback_vector, feedback_vector);
1881#else
1882 // Load the optimization state from the feedback vector and reuse the
1883 // register.
1884 Label flags_need_processing;
1885 Register flags = ecx;
1886 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1887 flags, saved_feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1888
1889 // Reload the feedback vector.
1890 __ movd(feedback_vector, saved_feedback_vector);
1891#endif // !V8_ENABLE_LEAPTIERING
1892
1893 {
1894 DCHECK_EQ(arg_count, eax);
1895 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, eax);
1896 __ movd(arg_count, saved_arg_count); // Restore eax.
1897 }
1898
1899 // Increment the invocation count.
1900 __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
1901
1902 XMMRegister return_address = xmm5;
1903 // Save the return address, so that we can push it to the end of the newly
1904 // set-up frame once we're done setting it up.
1905 __ PopReturnAddressTo(return_address, scratch);
1906 // The bytecode array was pushed to the stack by the caller.
1907 __ Pop(saved_bytecode_array, scratch);
1908 FrameScope frame_scope(masm, StackFrame::MANUAL);
1909 {
1910 ASM_CODE_COMMENT_STRING(masm, "Frame Setup");
1911 __ EnterFrame(StackFrame::BASELINE);
1912
1913 __ Push(descriptor.GetRegisterParameter(
1914 BaselineOutOfLinePrologueDescriptor::kCalleeContext)); // Callee's
1915 // context.
1916 Register callee_js_function = descriptor.GetRegisterParameter(
1917 BaselineOutOfLinePrologueDescriptor::kClosure);
1918 DCHECK_EQ(callee_js_function, kJavaScriptCallTargetRegister);
1919 DCHECK_EQ(callee_js_function, kJSFunctionRegister);
1920 ResetJSFunctionAge(masm, callee_js_function, scratch);
1921 __ Push(callee_js_function); // Callee's JS function.
1922 __ Push(saved_arg_count, scratch); // Push actual argument count.
1923
1924 // We'll use the bytecode for both code age/OSR resetting, and pushing onto
1925 // the frame, so load it into a register.
1926 __ Push(saved_bytecode_array, scratch);
1927 __ Push(saved_feedback_cell, scratch);
1928 __ Push(saved_feedback_vector, scratch);
1929 }
1930
1931 Label call_stack_guard;
1932 {
1933 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt check");
1934 // Stack check. This folds the checks for both the interrupt stack limit
1935 // check and the real stack limit into one by just checking for the
1936 // interrupt limit. The interrupt limit is either equal to the real stack
1937 // limit or tighter. By ensuring we have space until that limit after
1938 // building the frame we can quickly precheck both at once.
1939 //
1940 // TODO(v8:11429): Backport this folded check to the
1941 // InterpreterEntryTrampoline.
1942 __ movd(frame_size, saved_frame_size);
1943 __ Move(scratch, esp);
1944 DCHECK_NE(frame_size, kJavaScriptCallNewTargetRegister);
1945 __ sub(scratch, frame_size);
1946 __ CompareStackLimit(scratch, StackLimitKind::kInterruptStackLimit);
1947 __ j(below, &call_stack_guard);
1948 }
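// In pseudo-code, the folded check above is roughly:
//
//   if (esp - frame_size < interrupt_limit) goto call_stack_guard;
//
// Because the interrupt limit is at or above the real stack limit, a single
// compare proves both that the frame fits and that no interrupt is pending.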
1949
1950 // Push the return address back onto the stack for return.
1951 __ PushReturnAddressFrom(return_address, scratch);
1952 // Return to caller pushed pc, without any frame teardown.
1953 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1954 __ Ret();
1955
1956#ifndef V8_ENABLE_LEAPTIERING
1957 __ bind(&flags_need_processing);
1958 {
1959 ASM_CODE_COMMENT_STRING(masm, "Optimized marker check");
1960 // Drop the return address and bytecode array, rebalancing the return stack
1961 // buffer by using JumpMode::kPushAndReturn. We can't leave the slot and
1962 // overwrite it on return since we may do a runtime call along the way that
1963 // requires the stack to only contain valid frames.
1964 __ Drop(2);
1965 __ movd(arg_count, saved_arg_count); // Restore actual argument count.
1966 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, saved_feedback_vector);
1967 __ Trap();
1968 }
1969#endif
1970
1971 __ bind(&call_stack_guard);
1972 {
1973 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt call");
1974 {
1975 // Push the baseline code return address now, as if it had been pushed by
1976 // the call to this builtin.
1977 __ PushReturnAddressFrom(return_address, scratch);
1978 FrameScope manual_frame_scope(masm, StackFrame::INTERNAL);
1979 // Save the incoming new target or generator.
1980 __ Push(kJavaScriptCallNewTargetRegister);
1981 __ SmiTag(frame_size);
1982 __ Push(frame_size);
1983 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
1984 __ Pop(kJavaScriptCallNewTargetRegister);
1985 }
1986
1987 // Return to caller pushed pc, without any frame teardown.
1988 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1989 __ Ret();
1990 }
1991}
1992
1993// static
1994void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
1995 // We're here because we got deopted during BaselineOutOfLinePrologue's stack
1996 // check. Undo all its frame creation and call into the interpreter instead.
1997
1998 // Drop the feedback vector.
1999 __ Pop(ecx);
2000 // Drop bytecode offset (was the feedback vector but got replaced during
2001 // deopt).
2002 __ Pop(ecx);
2003 // Drop bytecode array
2004 __ Pop(ecx);
2005
2006 // argc.
2007 __ Pop(kJavaScriptCallArgCountRegister);
2008 // Closure.
2009 __ Pop(kJSFunctionRegister);
2010 // Context.
2011 __ Pop(kContextRegister);
2012
2013 // Drop frame pointer
2014 __ LeaveFrame(StackFrame::BASELINE);
2015
2016 // Enter the interpreter.
2017 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
2018}
2019
2020namespace {
2021void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
2022 bool javascript_builtin,
2023 bool with_result) {
2024 const RegisterConfiguration* config(RegisterConfiguration::Default());
2025 int allocatable_register_count = config->num_allocatable_general_registers();
2026 if (with_result) {
2027 if (javascript_builtin) {
2028 // xmm0 is not included in the allocatable registers.
2029 __ movd(xmm0, eax);
2030 } else {
2031 // Overwrite the hole inserted by the deoptimizer with the return value
2032 // from the LAZY deopt point.
2033 __ mov(
2034 Operand(esp, config->num_allocatable_general_registers() *
2035 kSystemPointerSize +
2036 BuiltinContinuationFrameConstants::kFixedFrameSize),
2037 eax);
2038 }
2039 }
2040
2041 // Replace the builtin index Smi on the stack with the start address of the
2042 // builtin loaded from the builtins table. The ret below will return to this
2043 // address.
2044 int offset_to_builtin_index = allocatable_register_count * kSystemPointerSize;
2045 __ mov(eax, Operand(esp, offset_to_builtin_index));
2046 __ LoadEntryFromBuiltinIndex(eax, eax);
2047 __ mov(Operand(esp, offset_to_builtin_index), eax);
2048
2049 for (int i = allocatable_register_count - 1; i >= 0; --i) {
2050 int code = config->GetAllocatableGeneralCode(i);
2051 __ pop(Register::from_code(code));
2052 if (javascript_builtin && code == kJavaScriptCallArgCountRegister.code()) {
2053 __ SmiUntag(Register::from_code(code));
2054 }
2055 }
2056 if (with_result && javascript_builtin) {
2057 // Overwrite the hole inserted by the deoptimizer with the return value from
2058 // the LAZY deopt point. eax contains the arguments count; the return value
2059 // from LAZY is always the last argument.
2060 __ movd(Operand(esp, eax, times_system_pointer_size,
2061 BuiltinContinuationFrameConstants::kFixedFrameSize -
2062 kJSArgcReceiverSlots * kSystemPointerSize),
2063 xmm0);
2064 }
2065 __ mov(
2066 ebp,
2067 Operand(esp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
2068 const int offsetToPC =
2069 BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
2070 kSystemPointerSize;
2071 __ pop(Operand(esp, offsetToPC));
2072 __ Drop(offsetToPC / kSystemPointerSize);
2073 __ ret(0);
2074}
2075} // namespace
2076
2077void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
2078 Generate_ContinueToBuiltinHelper(masm, false, false);
2079}
2080
2081void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
2082 MacroAssembler* masm) {
2083 Generate_ContinueToBuiltinHelper(masm, false, true);
2084}
2085
2086void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
2087 Generate_ContinueToBuiltinHelper(masm, true, false);
2088}
2089
2090void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
2091 MacroAssembler* masm) {
2092 Generate_ContinueToBuiltinHelper(masm, true, true);
2093}
2094
2095void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
2096 {
2097 FrameScope scope(masm, StackFrame::INTERNAL);
2098 __ CallRuntime(Runtime::kNotifyDeoptimized);
2099 // Tear down internal frame.
2100 }
2101 
2102 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
2103 __ mov(eax, Operand(esp, 1 * kSystemPointerSize));
2104 __ ret(1 * kSystemPointerSize); // Remove eax.
2105}
2106
2107// static
2108void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
2109 // ----------- S t a t e -------------
2110 // -- eax : argc
2111 // -- esp[0] : return address
2112 // -- esp[1] : receiver
2113 // -- esp[2] : thisArg
2114 // -- esp[3] : argArray
2115 // -----------------------------------
2116
2117 // 1. Load receiver into xmm0, argArray into edx (if present), remove all
2118 // arguments from the stack (including the receiver), and push thisArg (if
2119 // present) instead.
2120 {
2121 Label no_arg_array, no_this_arg;
2122 StackArgumentsAccessor args(eax);
2123 // Spill receiver to allow the usage of edi as a scratch register.
2124 __ movd(xmm0, args.GetReceiverOperand());
2125
2126 __ LoadRoot(edx, RootIndex::kUndefinedValue);
2127 __ mov(edi, edx);
2128 __ cmp(eax, Immediate(JSParameterCount(0)));
2129 __ j(equal, &no_this_arg, Label::kNear);
2130 {
2131 __ mov(edi, args[1]);
2132 __ cmp(eax, Immediate(JSParameterCount(1)));
2133 __ j(equal, &no_arg_array, Label::kNear);
2134 __ mov(edx, args[2]);
2135 __ bind(&no_arg_array);
2136 }
2137 __ bind(&no_this_arg);
2138 __ DropArgumentsAndPushNewReceiver(eax, edi, ecx);
2139
2140 // Restore receiver to edi.
2141 __ movd(edi, xmm0);
2142 }
2143
2144 // ----------- S t a t e -------------
2145 // -- edx : argArray
2146 // -- edi : receiver
2147 // -- esp[0] : return address
2148 // -- esp[4] : thisArg
2149 // -----------------------------------
2150
2151 // 2. We don't need to check explicitly for callable receiver here,
2152 // since that's the first thing the Call/CallWithArrayLike builtins
2153 // will do.
2154
2155 // 3. Tail call with no arguments if argArray is null or undefined.
2156 Label no_arguments;
2157 __ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments, Label::kNear);
2158 __ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
2159
2160 // 4a. Apply the receiver to the given argArray.
2161 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2162
2163 // 4b. The argArray is either null or undefined, so we tail call without any
2164 // arguments to the receiver.
2165 __ bind(&no_arguments);
2166 {
2167 __ Move(eax, JSParameterCount(0));
2168 __ TailCallBuiltin(Builtins::Call());
2169 }
2170}
2171
2172// static
2173void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2174 // Stack Layout:
2175 // esp[0] : Return address
2176 // esp[4] : Argument 0 (receiver: callable to call)
2177 // esp[8] : Argument 1
2178 // ...
2179 // esp[4 * n] : Argument n-1
2180 // esp[4 * (n + 1)] : Argument n
2181 // eax contains the number of arguments, n.
2182
2183 // 1. Get the callable to call (passed as receiver) from the stack.
2184 {
2185 StackArgumentsAccessor args(eax);
2186 __ mov(edi, args.GetReceiverOperand());
2187 }
2188
2189 // 2. Save the return address and drop the callable.
2190 __ PopReturnAddressTo(edx);
2191 __ Pop(ecx);
2192
2193 // 3. Make sure we have at least one argument.
2194 {
2195 Label done;
2196 __ cmp(eax, Immediate(JSParameterCount(0)));
2197 __ j(greater, &done, Label::kNear);
2198 __ PushRoot(RootIndex::kUndefinedValue);
2199 __ inc(eax);
2200 __ bind(&done);
2201 }
2202
2203 // 4. Push back the return address one slot down on the stack (overwriting the
2204 // original callable), making the original first argument the new receiver.
2205 __ PushReturnAddressFrom(edx);
2206 __ dec(eax); // One fewer argument (first argument is new receiver).
2207
2208 // 5. Call the callable.
2209 __ TailCallBuiltin(Builtins::Call());
2210}
2211
2212void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2213 // ----------- S t a t e -------------
2214 // -- eax : argc
2215 // -- esp[0] : return address
2216 // -- esp[4] : receiver
2217 // -- esp[8] : target (if argc >= 1)
2218 // -- esp[12] : thisArgument (if argc >= 2)
2219 // -- esp[16] : argumentsList (if argc == 3)
2220 // -----------------------------------
2221
2222 // 1. Load target into edi (if present), argumentsList into edx (if present),
2223 // remove all arguments from the stack (including the receiver), and push
2224 // thisArgument (if present) instead.
2225 {
2226 Label done;
2227 StackArgumentsAccessor args(eax);
2228 __ LoadRoot(edi, RootIndex::kUndefinedValue);
2229 __ mov(edx, edi);
2230 __ mov(ecx, edi);
2231 __ cmp(eax, Immediate(JSParameterCount(1)));
2232 __ j(below, &done, Label::kNear);
2233 __ mov(edi, args[1]); // target
2234 __ j(equal, &done, Label::kNear);
2235 __ mov(ecx, args[2]); // thisArgument
2236 __ cmp(eax, Immediate(JSParameterCount(3)));
2237 __ j(below, &done, Label::kNear);
2238 __ mov(edx, args[3]); // argumentsList
2239 __ bind(&done);
2240
2241 // Spill argumentsList to use edx as a scratch register.
2242 __ movd(xmm0, edx);
2243
2244 __ DropArgumentsAndPushNewReceiver(eax, ecx, edx);
2245
2246 // Restore argumentsList.
2247 __ movd(edx, xmm0);
2248 }
2249
2250 // ----------- S t a t e -------------
2251 // -- edx : argumentsList
2252 // -- edi : target
2253 // -- esp[0] : return address
2254 // -- esp[4] : thisArgument
2255 // -----------------------------------
2256
2257 // 2. We don't need to check explicitly for callable target here,
2258 // since that's the first thing the Call/CallWithArrayLike builtins
2259 // will do.
2260
2261 // 3. Apply the target to the given argumentsList.
2262 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2263}
2264
2265void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2266 // ----------- S t a t e -------------
2267 // -- eax : argc
2268 // -- esp[0] : return address
2269 // -- esp[4] : receiver
2270 // -- esp[8] : target
2271 // -- esp[12] : argumentsList
2272 // -- esp[16] : new.target (optional)
2273 // -----------------------------------
2274
2275 // 1. Load target into edi (if present), argumentsList into ecx (if present),
2276 // new.target into edx (if present, otherwise use target), remove all
2277 // arguments from the stack (including the receiver), and push thisArgument
2278 // (if present) instead.
2279 {
2280 Label done;
2281 StackArgumentsAccessor args(eax);
2282 __ LoadRoot(edi, RootIndex::kUndefinedValue);
2283 __ mov(edx, edi);
2284 __ mov(ecx, edi);
2285 __ cmp(eax, Immediate(JSParameterCount(1)));
2286 __ j(below, &done, Label::kNear);
2287 __ mov(edi, args[1]); // target
2288 __ mov(edx, edi);
2289 __ j(equal, &done, Label::kNear);
2290 __ mov(ecx, args[2]); // argumentsList
2291 __ cmp(eax, Immediate(JSParameterCount(3)));
2292 __ j(below, &done, Label::kNear);
2293 __ mov(edx, args[3]); // new.target
2294 __ bind(&done);
2295
2296 // Spill argumentsList to use ecx as a scratch register.
2297 __ movd(xmm0, ecx);
2298
2299 __ DropArgumentsAndPushNewReceiver(
2300 eax, masm->RootAsOperand(RootIndex::kUndefinedValue), ecx);
2301
2302 // Restore argumentsList.
2303 __ movd(ecx, xmm0);
2304 }
2305
2306 // ----------- S t a t e -------------
2307 // -- ecx : argumentsList
2308 // -- edx : new.target
2309 // -- edi : target
2310 // -- esp[0] : return address
2311 // -- esp[4] : receiver (undefined)
2312 // -----------------------------------
2313
2314 // 2. We don't need to check explicitly for constructor target here,
2315 // since that's the first thing the Construct/ConstructWithArrayLike
2316 // builtins will do.
2317
2318 // 3. We don't need to check explicitly for constructor new.target here,
2319 // since that's the second thing the Construct/ConstructWithArrayLike
2320 // builtins will do.
2321
2322 // 4. Construct the target with the given new.target and argumentsList.
2323 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
2324}
2325
2326namespace {
2327
2328// Allocate new stack space for |count| arguments and shift all existing
2329// arguments already on the stack. |pointer_to_new_space_out| points to the
2330// first free slot on the stack to copy additional arguments to and
2331// |argc_in_out| is updated to include |count|.
2332void Generate_AllocateSpaceAndShiftExistingArguments(
2333 MacroAssembler* masm, Register count, Register argc_in_out,
2334 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2335 DCHECK(!AreAliased(count, argc_in_out, pointer_to_new_space_out, scratch1,
2336 scratch2));
2337 // Use pointer_to_new_space_out as scratch until we set it to the correct
2338 // value at the end.
2339 Register old_esp = pointer_to_new_space_out;
2340 Register new_space = scratch1;
2341 __ mov(old_esp, esp);
2342
2343 __ lea(new_space, Operand(count, times_system_pointer_size, 0));
2344 __ AllocateStackSpace(new_space);
2345
2346 Register current = scratch1;
2347 Register value = scratch2;
2348
2349 Label loop, entry;
2350 __ mov(current, 0);
2351 __ jmp(&entry);
2352 __ bind(&loop);
2353 __ mov(value, Operand(old_esp, current, times_system_pointer_size, 0));
2354 __ mov(Operand(esp, current, times_system_pointer_size, 0), value);
2355 __ inc(current);
2356 __ bind(&entry);
2357 __ cmp(current, argc_in_out);
2358 __ j(less_equal, &loop, Label::kNear);
2359
2360 // Point to the next free slot above the shifted arguments (argc + 1 slot for
2361 // the return address).
2362 __ lea(
2363 pointer_to_new_space_out,
2364 Operand(esp, argc_in_out, times_system_pointer_size, kSystemPointerSize));
2365 // Update the total number of arguments.
2366 __ add(argc_in_out, count);
2367}
2368
2369} // namespace
2370
2371// static
2372// TODO(v8:11615): Observe Code::kMaxArguments in CallOrConstructVarargs
2373void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
2374 Builtin target_builtin) {
2375 // ----------- S t a t e -------------
2376 // -- edi : target
2377 // -- esi : context for the Call / Construct builtin
2378 // -- eax : number of parameters on the stack
2379 // -- ecx : len (number of elements to push from args)
2380 // -- edx : new.target (checked to be constructor or undefined)
2381 // -- esp[4] : arguments list (a FixedArray)
2382 // -- esp[0] : return address.
2383 // -----------------------------------
2384
2385 __ movd(xmm0, edx); // Spill new.target.
2386 __ movd(xmm1, edi); // Spill target.
2387 __ movd(xmm3, esi); // Spill the context.
2388
2389 const Register kArgumentsList = esi;
2390 const Register kArgumentsLength = ecx;
2391
2392 __ PopReturnAddressTo(edx);
2393 __ pop(kArgumentsList);
2394 __ PushReturnAddressFrom(edx);
2395
2396 if (v8_flags.debug_code) {
2397 // Allow kArgumentsList to be a FixedArray, or a FixedDoubleArray if
2398 // kArgumentsLength == 0.
2399 Label ok, fail;
2400 __ AssertNotSmi(kArgumentsList);
2401 __ mov(edx, FieldOperand(kArgumentsList, HeapObject::kMapOffset));
2402 __ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
2403 __ j(equal, &ok);
2404 __ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
2405 __ j(not_equal, &fail);
2406 __ cmp(kArgumentsLength, 0);
2407 __ j(equal, &ok);
2408 // Fall through.
2409 __ bind(&fail);
2410 __ Abort(AbortReason::kOperandIsNotAFixedArray);
2411
2412 __ bind(&ok);
2413 }
2414
2415 // Check the stack for overflow. We are not trying to catch interruptions
2416 // (i.e. debug break and preemption) here, so check the "real stack limit".
2417 Label stack_overflow;
2418 __ StackOverflowCheck(kArgumentsLength, edx, &stack_overflow);
2419
2420 __ movd(xmm4, kArgumentsList); // Spill the arguments list.
2421 // Move the arguments already in the stack,
2422 // including the receiver and the return address.
2423 // kArgumentsLength (ecx): Number of arguments to make room for.
2424 // eax: Number of arguments already on the stack.
2425 // edx: Points to first free slot on the stack after arguments were shifted.
2426 Generate_AllocateSpaceAndShiftExistingArguments(masm, kArgumentsLength, eax,
2427 edx, edi, esi);
2428 __ movd(kArgumentsList, xmm4); // Recover arguments list.
2429 __ movd(xmm2, eax); // Spill argument count.
2430
2431 // Push additional arguments onto the stack.
2432 {
2433 __ Move(eax, Immediate(0));
2434 Label done, push, loop;
2435 __ bind(&loop);
2436 __ cmp(eax, kArgumentsLength);
2437 __ j(equal, &done, Label::kNear);
2438 // Turn the hole into undefined as we go.
2439 __ mov(edi, FieldOperand(kArgumentsList, eax, times_tagged_size,
2440 OFFSET_OF_DATA_START(FixedArray)));
2441 __ CompareRoot(edi, RootIndex::kTheHoleValue);
2442 __ j(not_equal, &push, Label::kNear);
2443 __ LoadRoot(edi, RootIndex::kUndefinedValue);
2444 __ bind(&push);
2445 __ mov(Operand(edx, 0), edi);
2446 __ add(edx, Immediate(kSystemPointerSize));
2447 __ inc(eax);
2448 __ jmp(&loop);
2449 __ bind(&done);
2450 }
2451
2452 // Restore eax, edi and edx.
2453 __ movd(esi, xmm3); // Restore the context.
2454 __ movd(eax, xmm2); // Restore argument count.
2455 __ movd(edi, xmm1); // Restore target.
2456 __ movd(edx, xmm0); // Restore new.target.
2457
2458 // Tail-call to the actual Call or Construct builtin.
2459 __ TailCallBuiltin(target_builtin);
2460
2461 __ bind(&stack_overflow);
2462 __ movd(esi, xmm3); // Restore the context.
2463 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2464}
2465
2466// static
2467void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2468 CallOrConstructMode mode,
2469 Builtin target_builtin) {
2470 // ----------- S t a t e -------------
2471 // -- eax : the number of arguments
2472 // -- edi : the target to call (can be any Object)
2473 // -- esi : context for the Call / Construct builtin
2474 // -- edx : the new target (for [[Construct]] calls)
2475 // -- ecx : start index (to support rest parameters)
2476 // -----------------------------------
2477
2478 __ movd(xmm0, esi); // Spill the context.
2479
2480 // Check if new.target has a [[Construct]] internal method.
2481 if (mode == CallOrConstructMode::kConstruct) {
2482 Register scratch = esi;
2483
2484 Label new_target_constructor, new_target_not_constructor;
2485 __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
2486 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
2487 __ test_b(FieldOperand(scratch, Map::kBitFieldOffset),
2488 Immediate(Map::Bits1::IsConstructorBit::kMask));
2489 __ j(not_zero, &new_target_constructor, Label::kNear);
2490 __ bind(&new_target_not_constructor);
2491 {
2492 FrameScope scope(masm, StackFrame::MANUAL);
2493 __ EnterFrame(StackFrame::INTERNAL);
2494 __ Push(edx);
2495 __ movd(esi, xmm0); // Restore the context.
2496 __ CallRuntime(Runtime::kThrowNotConstructor);
2497 }
2498 __ bind(&new_target_constructor);
2499 }
2500
2501 __ movd(xmm1, edx); // Preserve new.target (in case of [[Construct]]).
2502
2503 Label stack_done, stack_overflow;
2504 __ mov(edx, Operand(ebp, StandardFrameConstants::kArgCOffset));
2505 __ dec(edx); // Exclude receiver.
2506 __ sub(edx, ecx);
2507 __ j(less_equal, &stack_done);
2508 {
2509 // ----------- S t a t e -------------
2510 // -- eax : the number of arguments already in the stack
2511 // -- ecx : start index (to support rest parameters)
2512 // -- edx : number of arguments to copy, i.e. arguments count - start index
2513 // -- edi : the target to call (can be any Object)
2514 // -- ebp : point to the caller stack frame
2515 // -- xmm0 : context for the Call / Construct builtin
2516 // -- xmm1 : the new target (for [[Construct]] calls)
2517 // -----------------------------------
2518
2519 // Forward the arguments from the caller frame.
2520 __ movd(xmm2, edi); // Preserve the target to call.
2521 __ StackOverflowCheck(edx, edi, &stack_overflow);
2522 __ movd(xmm3, ebx); // Preserve root register.
2523
2524 Register scratch = ebx;
2525
2526 // Move the arguments already in the stack,
2527 // including the receiver and the return address.
2528 // edx: Number of arguments to make room for.
2529 // eax: Number of arguments already on the stack.
2530 // esi: Points to first free slot on the stack after arguments were shifted.
2531 Generate_AllocateSpaceAndShiftExistingArguments(masm, edx, eax, esi, ebx,
2532 edi);
2533
2534 // Point to the first argument to copy (skipping receiver).
2535 __ lea(ecx, Operand(ecx, times_system_pointer_size,
2536 CommonFrameConstants::kFixedFrameSizeAboveFp +
2537 kSystemPointerSize));
2538 __ add(ecx, ebp);
2539
2540 // Copy the additional caller arguments onto the stack.
2541 // TODO(victorgomes): Consider using forward order as potentially more cache
2542 // friendly.
2543 {
2544 Register src = ecx, dest = esi, num = edx;
2545 Label loop;
2546 __ bind(&loop);
2547 __ dec(num);
2548 __ mov(scratch, Operand(src, num, times_system_pointer_size, 0));
2549 __ mov(Operand(dest, num, times_system_pointer_size, 0), scratch);
2550 __ j(not_zero, &loop);
2551 }
2552
2553 __ movd(ebx, xmm3); // Restore root register.
2554 __ movd(edi, xmm2); // Restore the target to call.
2555 }
2556 __ bind(&stack_done);
2557
2558 __ movd(edx, xmm1); // Restore new.target (in case of [[Construct]]).
2559 __ movd(esi, xmm0); // Restore the context.
2560
2561 // Tail-call to the actual Call or Construct builtin.
2562 __ TailCallBuiltin(target_builtin);
2563
2564 __ bind(&stack_overflow);
2565 __ movd(edi, xmm2); // Restore the target to call.
2566 __ movd(esi, xmm0); // Restore the context.
2567 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2568}
2569
2570// static
2571void Builtins::Generate_CallFunction(MacroAssembler* masm,
2572 ConvertReceiverMode mode) {
2573 // ----------- S t a t e -------------
2574 // -- eax : the number of arguments
2575 // -- edi : the function to call (checked to be a JSFunction)
2576 // -----------------------------------
2577 StackArgumentsAccessor args(eax);
2578 __ AssertCallableFunction(edi, edx);
2579
2580 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2581
2582 // Enter the context of the function; ToObject has to run in the function
2583 // context, and we also need to take the global proxy from the function
2584 // context in case of conversion.
2585 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2586 // We need to convert the receiver for non-native sloppy mode functions.
2587 Label done_convert;
2588 __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
2589 Immediate(SharedFunctionInfo::IsNativeBit::kMask |
2590 SharedFunctionInfo::IsStrictBit::kMask));
2591 __ j(not_zero, &done_convert);
2592 {
2593 // ----------- S t a t e -------------
2594 // -- eax : the number of arguments
2595 // -- edx : the shared function info.
2596 // -- edi : the function to call (checked to be a JSFunction)
2597 // -- esi : the function context.
2598 // -----------------------------------
2599 
2600 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2601 // Patch receiver to global proxy.
2602 __ LoadGlobalProxy(ecx);
2603 } else {
2604 Label convert_to_object, convert_receiver;
2605 __ mov(ecx, args.GetReceiverOperand());
2606 __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
2607 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2608 __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx); // Clobbers ecx.
2609 __ j(above_equal, &done_convert);
2610 // Reload the receiver (it was clobbered by CmpObjectType).
2611 __ mov(ecx, args.GetReceiverOperand());
2612 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2613 Label convert_global_proxy;
2614 __ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
2615 Label::kNear);
2616 __ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
2617 Label::kNear);
2618 __ bind(&convert_global_proxy);
2619 {
2620 // Patch receiver to global proxy.
2621 __ LoadGlobalProxy(ecx);
2622 }
2623 __ jmp(&convert_receiver);
2624 }
2625 __ bind(&convert_to_object);
2626 {
2627 // Convert receiver using ToObject.
2628 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2629 // in the fast case? (fall back to AllocateInNewSpace?)
2630 FrameScope scope(masm, StackFrame::INTERNAL);
2631 __ SmiTag(eax);
2632 __ Push(eax);
2633 __ Push(edi);
2634 __ mov(eax, ecx);
2635 __ Push(esi);
2636 __ CallBuiltin(Builtin::kToObject);
2637 __ Pop(esi);
2638 __ mov(ecx, eax);
2639 __ Pop(edi);
2640 __ Pop(eax);
2641 __ SmiUntag(eax);
2642 }
2643 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2644 __ bind(&convert_receiver);
2645 }
2646 __ mov(args.GetReceiverOperand(), ecx);
2647 }
2648 __ bind(&done_convert);
2649
2650 // ----------- S t a t e -------------
2651 // -- eax : the number of arguments
2652 // -- edx : the shared function info.
2653 // -- edi : the function to call (checked to be a JSFunction)
2654 // -- esi : the function context.
2655 // -----------------------------------
2656
2657 __ movzx_w(
2658 ecx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2659 __ InvokeFunctionCode(edi, no_reg, ecx, eax, InvokeType::kJump);
2660}
2661
2662namespace {
2663
2664void Generate_PushBoundArguments(MacroAssembler* masm) {
2665 // ----------- S t a t e -------------
2666 // -- eax : the number of arguments
2667 // -- edx : new.target (only in case of [[Construct]])
2668 // -- edi : target (checked to be a JSBoundFunction)
2669 // -----------------------------------
2670 __ movd(xmm0, edx); // Spill edx.
2671
2672 // Load [[BoundArguments]] into ecx and length of that into edx.
2673 Label no_bound_arguments;
2674 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2675 __ mov(edx, FieldOperand(ecx, offsetof(FixedArray, length_)));
2676 __ SmiUntag(edx);
2677 __ test(edx, edx);
2678 __ j(zero, &no_bound_arguments);
2679 {
2680 // ----------- S t a t e -------------
2681 // -- eax : the number of arguments
2682 // -- xmm0 : new.target (only in case of [[Construct]])
2683 // -- edi : target (checked to be a JSBoundFunction)
2684 // -- ecx : the [[BoundArguments]] (implemented as FixedArray)
2685 // -- edx : the number of [[BoundArguments]]
2686 // -----------------------------------
2687
2688 // Check the stack for overflow.
2689 {
2690 Label done, stack_overflow;
2691 __ StackOverflowCheck(edx, ecx, &stack_overflow);
2692 __ jmp(&done);
2693 __ bind(&stack_overflow);
2694 {
2695 FrameScope frame(masm, StackFrame::MANUAL);
2696 __ EnterFrame(StackFrame::INTERNAL);
2697 __ CallRuntime(Runtime::kThrowStackOverflow);
2698 __ int3();
2699 }
2700 __ bind(&done);
2701 }
2702
2703 // Spill context.
2704 __ movd(xmm3, esi);
2705
2706 // Save Return Address and Receiver into registers.
2707 __ pop(esi);
2708 __ movd(xmm1, esi);
2709 __ pop(esi);
2710 __ movd(xmm2, esi);
2711
2712 // Push [[BoundArguments]] to the stack.
2713 {
2714 Label loop;
2715 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2716 __ mov(edx, FieldOperand(ecx, offsetof(FixedArray, length_)));
2717 __ SmiUntag(edx);
2718 // Adjust effective number of arguments (eax contains the number of
2719 // arguments from the call not including receiver plus the number of
2720 // [[BoundArguments]]).
2721 __ add(eax, edx);
2722 __ bind(&loop);
2723 __ dec(edx);
2724 __ mov(esi, FieldOperand(ecx, edx, times_tagged_size,
2725 OFFSET_OF_DATA_START(FixedArray)));
2726 __ push(esi);
2727 __ j(greater, &loop);
2728 }
2729
2730 // Restore Receiver and Return Address.
2731 __ movd(esi, xmm2);
2732 __ push(esi);
2733 __ movd(esi, xmm1);
2734 __ push(esi);
2735
2736 // Restore context.
2737 __ movd(esi, xmm3);
2738 }
2739
2740 __ bind(&no_bound_arguments);
2741 __ movd(edx, xmm0); // Reload edx.
2742}
2743
2744} // namespace
2745
2746// static
2747void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2748 // ----------- S t a t e -------------
2749 // -- eax : the number of arguments
2750 // -- edi : the function to call (checked to be a JSBoundFunction)
2751 // -----------------------------------
2752 __ AssertBoundFunction(edi);
2753
2754 // Patch the receiver to [[BoundThis]].
2755 StackArgumentsAccessor args(eax);
2756 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2757 __ mov(args.GetReceiverOperand(), ecx);
2758
2759 // Push the [[BoundArguments]] onto the stack.
2760 Generate_PushBoundArguments(masm);
2761
2762 // Call the [[BoundTargetFunction]] via the Call builtin.
2763 __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2764 __ TailCallBuiltin(Builtins::Call());
2765}
2766
2767// static
2768void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2769 // ----------- S t a t e -------------
2770 // -- eax : the number of arguments
2771 // -- edi : the target to call (can be any Object).
2772 // -----------------------------------
2773 Register argc = eax;
2774 Register target = edi;
2775 Register map = ecx;
2776 Register instance_type = edx;
2777 DCHECK(!AreAliased(argc, target, map, instance_type));
2778
2779 StackArgumentsAccessor args(argc);
2780
2781 Label non_callable, non_smi, non_callable_jsfunction, non_jsboundfunction,
2782 non_proxy, non_wrapped_function, class_constructor;
2783 __ JumpIfSmi(target, &non_callable);
2784 __ bind(&non_smi);
2785 __ LoadMap(map, target);
2786 __ CmpInstanceTypeRange(map, instance_type, map,
2787 FIRST_CALLABLE_JS_FUNCTION_TYPE,
2788 LAST_CALLABLE_JS_FUNCTION_TYPE);
2789 __ j(above, &non_callable_jsfunction);
2790 __ TailCallBuiltin(Builtins::CallFunction(mode));
2791
2792 __ bind(&non_callable_jsfunction);
2793 __ cmpw(instance_type, Immediate(JS_BOUND_FUNCTION_TYPE));
2794 __ j(not_equal, &non_jsboundfunction);
2795 __ TailCallBuiltin(Builtin::kCallBoundFunction);
2796
2797 // Check if target is a proxy and call CallProxy external builtin
2798 __ bind(&non_jsboundfunction);
2799 __ LoadMap(map, target);
2800 __ test_b(FieldOperand(map, Map::kBitFieldOffset),
2801 Immediate(Map::Bits1::IsCallableBit::kMask));
2802 __ j(zero, &non_callable);
2803
2804 // Call CallProxy external builtin
2805 __ cmpw(instance_type, Immediate(JS_PROXY_TYPE));
2806 __ j(not_equal, &non_proxy);
2807 __ TailCallBuiltin(Builtin::kCallProxy);
2808
2809 // Check if target is a wrapped function and call CallWrappedFunction external
2810 // builtin
2811 __ bind(&non_proxy);
2812 __ cmpw(instance_type, Immediate(JS_WRAPPED_FUNCTION_TYPE));
2813 __ j(not_equal, &non_wrapped_function);
2814 __ TailCallBuiltin(Builtin::kCallWrappedFunction);
2815
2816 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2817 // Check that the function is not a "classConstructor".
2818 __ bind(&non_wrapped_function);
2819 __ cmpw(instance_type, Immediate(JS_CLASS_CONSTRUCTOR_TYPE));
2820 __ j(equal, &class_constructor);
2821
2822 // 2. Call to something else, which might have a [[Call]] internal method (if
2823 // not we raise an exception).
2824 // Overwrite the original receiver with the (original) target.
2825 __ mov(args.GetReceiverOperand(), target);
2826 // Let the "call_as_function_delegate" take care of the rest.
2827 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2828 __ TailCallBuiltin(
2829 Builtins::CallFunction(ConvertReceiverMode::kNotNullOrUndefined));
2830 
2831 // 3. Call to something that is not callable.
2832 __ bind(&non_callable);
2833 {
2834 FrameScope scope(masm, StackFrame::INTERNAL);
2835 __ Push(target);
2836 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2837 __ Trap(); // Unreachable.
2838 }
2839
2840 // 4. The function is a "classConstructor", need to raise an exception.
2841 __ bind(&class_constructor);
2842 {
2843 FrameScope frame(masm, StackFrame::INTERNAL);
2844 __ Push(target);
2845 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2846 __ Trap(); // Unreachable.
2847 }
2848}
2849
2850// static
2851void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2852 // ----------- S t a t e -------------
2853 // -- eax : the number of arguments
2854 // -- edx : the new target (checked to be a constructor)
2855 // -- edi : the constructor to call (checked to be a JSFunction)
2856 // -----------------------------------
2857 __ AssertConstructor(edi);
2858 __ AssertFunction(edi, ecx);
2859
2860 Label call_generic_stub;
2861
2862 // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2863 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2864 __ test(FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset),
2865 Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2866 __ j(zero, &call_generic_stub, Label::kNear);
2867
2868 // The calling convention for function-specific ConstructStubs requires
2869 // ecx to contain either an AllocationSite or undefined.
2870 __ LoadRoot(ecx, RootIndex::kUndefinedValue);
2871 __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);
2872
2873 __ bind(&call_generic_stub);
2874 // The calling convention for function-specific ConstructStubs requires
2875 // ecx to contain either an AllocationSite or undefined.
2876 __ LoadRoot(ecx, RootIndex::kUndefinedValue);
2877 __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
2878}
2879
2880// static
2881void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2882 // ----------- S t a t e -------------
2883 // -- eax : the number of arguments
2884 // -- edx : the new target (checked to be a constructor)
2885 // -- edi : the constructor to call (checked to be a JSBoundFunction)
2886 // -----------------------------------
2887 __ AssertConstructor(edi);
2888 __ AssertBoundFunction(edi);
2889
2890 // Push the [[BoundArguments]] onto the stack.
2891 Generate_PushBoundArguments(masm);
2892
2893 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2894 {
2895 Label done;
2896 __ cmp(edi, edx);
2897 __ j(not_equal, &done, Label::kNear);
2898 __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2899 __ bind(&done);
2900 }
2901
2902 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2903 __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2904 __ TailCallBuiltin(Builtin::kConstruct);
2905}
2906
2907// static
2908void Builtins::Generate_Construct(MacroAssembler* masm) {
2909 // ----------- S t a t e -------------
2910 // -- eax : the number of arguments
2911 // -- edx : the new target (either the same as the constructor or
2912 // the JSFunction on which new was invoked initially)
2913 // -- edi : the constructor to call (can be any Object)
2914 // -----------------------------------
2915 Register argc = eax;
2916 Register target = edi;
2917 Register map = ecx;
2918 DCHECK(!AreAliased(argc, target, map));
2919
2920 StackArgumentsAccessor args(argc);
2921
2922 // Check if target is a Smi.
2923 Label non_constructor, non_proxy, non_jsfunction, non_jsboundfunction;
2924 __ JumpIfSmi(target, &non_constructor);
2925
2926 // Check if target has a [[Construct]] internal method.
2927 __ mov(map, FieldOperand(target, HeapObject::kMapOffset));
2928 __ test_b(FieldOperand(map, Map::kBitFieldOffset),
2929 Immediate(Map::Bits1::IsConstructorBit::kMask));
2930 __ j(zero, &non_constructor);
2931
2932 // Dispatch based on instance type.
2933 __ CmpInstanceTypeRange(map, map, map, FIRST_JS_FUNCTION_TYPE,
2934 LAST_JS_FUNCTION_TYPE);
2935 __ j(above, &non_jsfunction);
2936 __ TailCallBuiltin(Builtin::kConstructFunction);
2937
2938 // Only dispatch to bound functions after checking whether they are
2939 // constructors.
2940 __ bind(&non_jsfunction);
2941 __ mov(map, FieldOperand(target, HeapObject::kMapOffset));
2942 __ CmpInstanceType(map, JS_BOUND_FUNCTION_TYPE);
2943 __ j(not_equal, &non_jsboundfunction);
2944 __ TailCallBuiltin(Builtin::kConstructBoundFunction);
2945
2946 // Only dispatch to proxies after checking whether they are constructors.
2947 __ bind(&non_jsboundfunction);
2948 __ CmpInstanceType(map, JS_PROXY_TYPE);
2949 __ j(not_equal, &non_proxy);
2950 __ TailCallBuiltin(Builtin::kConstructProxy);
2951
2952 // Called Construct on an exotic Object with a [[Construct]] internal method.
2953 __ bind(&non_proxy);
2954 {
2955 // Overwrite the original receiver with the (original) target.
2956 __ mov(args.GetReceiverOperand(), target);
2957 // Let the "call_as_constructor_delegate" take care of the rest.
2958 __ LoadNativeContextSlot(target,
2959 Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
2960 __ TailCallBuiltin(Builtins::CallFunction());
2961 }
2962
2963 // Called Construct on an Object that doesn't have a [[Construct]] internal
2964 // method.
2965 __ bind(&non_constructor);
2966 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
2967}
2968
2969namespace {
2970
2971void Generate_OSREntry(MacroAssembler* masm, Register entry_address) {
2972 ASM_CODE_COMMENT(masm);
2973 // Overwrite the return address on the stack.
2974 __ mov(Operand(esp, 0), entry_address);
2975
2976 // And "return" to the OSR entry point of the function.
2977 __ ret(0);
2978}
2979
2980enum class OsrSourceTier {
2981 kInterpreter,
2982 kBaseline,
2983};
2984
2985void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
2986 Register maybe_target_code,
2987 Register expected_param_count) {
2988 Label jump_to_optimized_code;
2989 {
2990 // If maybe_target_code is not null, no need to call into runtime. A
2991 // precondition here is: if maybe_target_code is an InstructionStream
2992 // object, it must NOT be marked_for_deoptimization (callers must ensure
2993 // this).
2994 __ cmp(maybe_target_code, Immediate(0));
2995 __ j(not_equal, &jump_to_optimized_code, Label::kNear);
2996 }
2997
2998 ASM_CODE_COMMENT(masm);
2999 {
3000 FrameScope scope(masm, StackFrame::INTERNAL);
3001 __ CallRuntime(Runtime::kCompileOptimizedOSR);
3002 }
3003
3004 // If the code object is null, just return to the caller.
3005 __ cmp(eax, Immediate(0));
3006 __ j(not_equal, &jump_to_optimized_code, Label::kNear);
3007 __ ret(0);
3008
3009 __ bind(&jump_to_optimized_code);
3010 DCHECK_EQ(maybe_target_code, eax); // Already in the right spot.
3011
3012 // OSR entry tracing.
3013 {
3014 Label next;
3015 __ cmpb(__ ExternalReferenceAsOperand(
3016 ExternalReference::address_of_log_or_trace_osr(), ecx),
3017 Immediate(0));
3018 __ j(equal, &next, Label::kNear);
3019
3020 {
3021 FrameScope scope(masm, StackFrame::INTERNAL);
3022 __ Push(eax); // Preserve the code object.
3023 __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
3024 __ Pop(eax);
3025 }
3026
3027 __ bind(&next);
3028 }
3029
3030 if (source == OsrSourceTier::kInterpreter) {
3031 // Drop the handler frame that is sitting on top of the actual
3032 // JavaScript frame. This is the case when OSR is triggered from bytecode.
3033 __ leave();
3034 }
3035
3036 // The sandbox would rely on testing expected_parameter_count here.
3037 static_assert(!V8_ENABLE_SANDBOX_BOOL);
3038
3039 // Load deoptimization data from the code object.
3040 __ mov(ecx, Operand(eax, Code::kDeoptimizationDataOrInterpreterDataOffset -
3041 kHeapObjectTag));
3042 
3043 // Load the OSR entrypoint offset from the deoptimization data.
3044 __ mov(ecx, Operand(ecx, FixedArray::OffsetOfElementAt(
3045 DeoptimizationData::kOsrPcOffsetIndex) -
3046 kHeapObjectTag));
3047 __ SmiUntag(ecx);
3048
3049 __ LoadCodeInstructionStart(eax, eax);
3050
3051 // Compute the target address = code_entry + osr_offset
3052 __ add(eax, ecx);
3053
3054 Generate_OSREntry(masm, eax);
3055}
3056
3057} // namespace
3058
3059void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
3060 using D = OnStackReplacementDescriptor;
3061 static_assert(D::kParameterCount == 2);
3062 OnStackReplacement(masm, OsrSourceTier::kInterpreter,
3063 D::MaybeTargetCodeRegister(),
3064 D::ExpectedParameterCountRegister());
3065}
3066
3067void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
3068 using D = OnStackReplacementDescriptor;
3069 static_assert(D::kParameterCount == 2);
3070
3071 __ mov(kContextRegister,
3072 MemOperand(ebp, BaselineFrameConstants::kContextOffset));
3073 OnStackReplacement(masm, OsrSourceTier::kBaseline,
3074 D::MaybeTargetCodeRegister(),
3075 D::ExpectedParameterCountRegister());
3076}
3077
3078#if V8_ENABLE_WEBASSEMBLY
3079
3080// Returns the offset beyond the last saved FP register.
3081int SaveWasmParams(MacroAssembler* masm) {
3082 // Save all parameter registers (see wasm-linkage.h). They might be
3083 // overwritten in the subsequent runtime call. We don't have any callee-saved
3084 // registers in wasm, so no need to store anything else.
3085 static_assert(WasmLiftoffSetupFrameConstants::kNumberOfSavedGpParamRegs + 1 ==
3086 arraysize(wasm::kGpParamRegisters),
3087 "frame size mismatch");
3088 for (Register reg : wasm::kGpParamRegisters) {
3089 __ Push(reg);
3090 }
3091 static_assert(WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs ==
3092 arraysize(wasm::kFpParamRegisters),
3093 "frame size mismatch");
3094 __ AllocateStackSpace(kSimd128Size * arraysize(wasm::kFpParamRegisters));
3095 int offset = 0;
3096 for (DoubleRegister reg : wasm::kFpParamRegisters) {
3097 __ movdqu(Operand(esp, offset), reg);
3098 offset += kSimd128Size;
3099 }
3100 return offset;
3101}
3102
3103// Consumes the offset beyond the last saved FP register (as returned by
3104// {SaveWasmParams}).
3105 void RestoreWasmParams(MacroAssembler* masm, int offset) {
3106 for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
3107 offset -= kSimd128Size;
3108 __ movdqu(reg, Operand(esp, offset));
3109 }
3110 DCHECK_EQ(0, offset);
3111 __ add(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
3112 for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
3113 __ Pop(reg);
3114 }
3115}
3116
3117// When this builtin is called, the topmost stack entry is the calling pc.
3118// This is replaced with the following:
3119//
3120// [ calling pc ] <-- esp; popped by {ret}.
3121// [ feedback vector ]
3122// [ Wasm instance data ]
3123// [ WASM frame marker ]
3124// [ saved ebp ] <-- ebp; this is where "calling pc" used to be.
3125void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
3126 constexpr Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
3127
3128 // We have zero free registers at this point. Free up a temp. Its value
3129 // could be tagged, but we're only storing it on the stack for a short
3130 // while, and no GC or stack walk can happen during this time.
3131 Register tmp = eax; // Arbitrarily chosen.
3132 __ Push(tmp); // This is the "marker" slot.
3133 {
3134 Operand saved_ebp_slot = Operand(esp, kSystemPointerSize);
3135 __ mov(tmp, saved_ebp_slot); // tmp now holds the "calling pc".
3136 __ mov(saved_ebp_slot, ebp);
3137 __ lea(ebp, Operand(esp, kSystemPointerSize));
3138 }
3139 __ Push(tmp); // This is the "instance" slot.
3140
3141 // Stack layout is now:
3142 // [calling pc] <-- instance_data_slot <-- esp
3143 // [saved tmp] <-- marker_slot
3144 // [saved ebp]
3145 Operand marker_slot = Operand(ebp, WasmFrameConstants::kFrameTypeOffset);
3146 Operand instance_data_slot =
3147 Operand(ebp, WasmFrameConstants::kWasmInstanceDataOffset);
3148
3149 // Load the feedback vector from the trusted instance data.
3150 __ mov(tmp, FieldOperand(kWasmImplicitArgRegister,
3151 WasmTrustedInstanceData::kFeedbackVectorsOffset));
3152 __ mov(tmp, FieldOperand(tmp, func_index, times_tagged_size,
3153 OFFSET_OF_DATA_START(FixedArray)));
3154 Label allocate_vector;
3155 __ JumpIfSmi(tmp, &allocate_vector);
3156
3157 // Vector exists. Finish setting up the stack frame.
3158 __ Push(tmp); // Feedback vector.
3159 __ mov(tmp, instance_data_slot); // Calling PC.
3160 __ Push(tmp);
3161 __ mov(instance_data_slot, kWasmImplicitArgRegister);
3162 __ mov(tmp, marker_slot);
3163 __ mov(marker_slot, Immediate(StackFrame::TypeToMarker(StackFrame::WASM)));
3164 __ ret(0);
3165
3166 __ bind(&allocate_vector);
3167 // Feedback vector doesn't exist yet. Call the runtime to allocate it.
3168 // We temporarily change the frame type for this, because we need special
3169 // handling by the stack walker in case of GC.
3170 // For the runtime call, we create the following stack layout:
3171 //
3172 // [ reserved slot for NativeModule ] <-- arg[2]
3173 // [ ("declared") function index ] <-- arg[1] for runtime func.
3174 // [ Wasm instance data ] <-- arg[0]
3175 // [ ...spilled Wasm parameters... ]
3176 // [ calling pc ] <-- already in place
3177 // [ WASM_LIFTOFF_SETUP marker ]
3178 // [ saved ebp ] <-- already in place
3179
3180 __ mov(tmp, marker_slot);
3181 __ mov(marker_slot,
3182 Immediate(StackFrame::TypeToMarker(StackFrame::WASM_LIFTOFF_SETUP)));
3183
3184 int offset = SaveWasmParams(masm);
3185
3186 // Arguments to the runtime function: instance, func_index.
3187 __ Push(kWasmImplicitArgRegister);
3188 __ SmiTag(func_index);
3189 __ Push(func_index);
3190 // Allocate a stack slot where the runtime function can spill a pointer
3191 // to the NativeModule.
3192 __ Push(esp);
3193 __ Move(kContextRegister, Smi::zero());
3194 __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
3195 tmp = func_index;
3196 __ mov(tmp, kReturnRegister0);
3197
3198 RestoreWasmParams(masm, offset);
3199
3200 // Finish setting up the stack frame:
3201 // [ calling pc ]
3202 // (tmp reg) ---> [ feedback vector ]
3203 // [ calling pc ] => [ Wasm instance data ] <-- instance_data_slot
3204 // [ WASM_LIFTOFF_SETUP ] [ WASM ] <-- marker_slot
3205 // [ saved ebp ] [ saved ebp ]
3206 __ mov(marker_slot, Immediate(StackFrame::TypeToMarker(StackFrame::WASM)));
3207 __ Push(tmp); // Feedback vector.
3208 __ mov(tmp, instance_data_slot); // Calling PC.
3209 __ Push(tmp);
3210 __ mov(instance_data_slot, kWasmImplicitArgRegister);
3211 __ ret(0);
3212}
3213
3214void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
3215 // The function index was put in edi by the jump table trampoline.
3216 // Convert to Smi for the runtime call.
3217 __ SmiTag(kWasmCompileLazyFuncIndexRegister);
3218 {
3219 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
3220 FrameScope scope(masm, StackFrame::INTERNAL);
3221 int offset = SaveWasmParams(masm);
3222
3223 // Push arguments for the runtime function.
3224 __ Push(kWasmImplicitArgRegister);
3225 __ Push(kWasmCompileLazyFuncIndexRegister);
3226 // Initialize the JavaScript context with 0. CEntry will use it to
3227 // set the current context on the isolate.
3228 __ Move(kContextRegister, Smi::zero());
3229 __ CallRuntime(Runtime::kWasmCompileLazy, 2);
3230 // The runtime function returns the jump table slot offset as a Smi. Use
3231 // that to compute the jump target in edi.
3232 __ SmiUntag(kReturnRegister0);
3233 __ mov(edi, kReturnRegister0);
3234
3235 RestoreWasmParams(masm, offset);
3236
3237 // After the instance data register has been restored, we can add the jump
3238 // table start to the jump table offset already stored in edi.
3239 __ add(edi, MemOperand(kWasmImplicitArgRegister,
3240 WasmTrustedInstanceData::kJumpTableStartOffset -
3241 kHeapObjectTag));
3242 }
3243
3244 // Finally, jump to the jump table slot for the function.
3245 __ jmp(edi);
3246}
3247
3248void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
3249 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
3250 {
3251 FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
3252
3253 // Save all parameter registers. They might hold live values, we restore
3254 // them after the runtime call.
3255 for (Register reg :
3256 base::Reversed(WasmDebugBreakFrameConstants::kPushedGpRegs)) {
3257 __ Push(reg);
3258 }
3259
3260 constexpr int kFpStackSize =
3261 kSimd128Size * WasmDebugBreakFrameConstants::kNumPushedFpRegisters;
3262 __ AllocateStackSpace(kFpStackSize);
3263 int offset = kFpStackSize;
3264 for (DoubleRegister reg :
3265 base::Reversed(WasmDebugBreakFrameConstants::kPushedFpRegs)) {
3266 offset -= kSimd128Size;
3267 __ movdqu(Operand(esp, offset), reg);
3268 }
3269
3270 // Initialize the JavaScript context with 0. CEntry will use it to
3271 // set the current context on the isolate.
3272 __ Move(kContextRegister, Smi::zero());
3273 __ CallRuntime(Runtime::kWasmDebugBreak, 0);
3274
3275 // Restore registers.
3276 for (DoubleRegister reg : WasmDebugBreakFrameConstants::kPushedFpRegs) {
3277 __ movdqu(reg, Operand(esp, offset));
3278 offset += kSimd128Size;
3279 }
3280 __ add(esp, Immediate(kFpStackSize));
3281 for (Register reg : WasmDebugBreakFrameConstants::kPushedGpRegs) {
3282 __ Pop(reg);
3283 }
3284 }
3285
3286 __ ret(0);
3287}
3288
3289namespace {
3290// Check that the stack was in the old state (if generated code assertions are
3291// enabled), and switch to the new state.
3292void SwitchStackState(MacroAssembler* masm, Register stack,
3293 wasm::JumpBuffer::StackState old_state,
3294 wasm::JumpBuffer::StackState new_state) {
3295 __ cmp(MemOperand(stack, wasm::kStackStateOffset), Immediate(old_state));
3296 Label ok;
3297 __ j(equal, &ok, Label::kNear);
3298 __ Trap();
3299 __ bind(&ok);
3300 __ mov(MemOperand(stack, wasm::kStackStateOffset), Immediate(new_state));
3301}
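// A minimal C++ model of the transition above (names invented for
// illustration; the real state values live in wasm::JumpBuffer):
//
//   enum StackState { Active, Suspended, Inactive, Retired };
//   void Switch(StackState* state, StackState expected, StackState next) {
//     if (*state != expected) __builtin_trap();  // mirrors the __ Trap() above
//     *state = next;
//   }
//
// i.e. a compare against kStackStateOffset, a conditional trap, and a store.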
3302
3303void FillJumpBuffer(MacroAssembler* masm, Register stack, Register scratch,
3304 Label* pc) {
3305 DCHECK(!AreAliased(scratch, stack));
3306
3307 __ mov(MemOperand(stack, wasm::kStackSpOffset), esp);
3308 __ mov(MemOperand(stack, wasm::kStackFpOffset), ebp);
3309 __ mov(scratch, __ StackLimitAsOperand(StackLimitKind::kRealStackLimit));
3310 __ mov(MemOperand(stack, wasm::kStackLimitOffset), scratch);
3311 __ LoadLabelAddress(scratch, pc);
3312 __ mov(MemOperand(stack, wasm::kStackPcOffset), scratch);
3313}
3314
3315void LoadJumpBuffer(MacroAssembler* masm, Register stack, bool load_pc,
3316 wasm::JumpBuffer::StackState expected_state) {
3317 __ mov(esp, MemOperand(stack, wasm::kStackSpOffset));
3318 __ mov(ebp, MemOperand(stack, wasm::kStackFpOffset));
3319 SwitchStackState(masm, stack, expected_state, wasm::JumpBuffer::Active);
3320 if (load_pc) {
3321 __ jmp(MemOperand(stack, wasm::kStackPcOffset));
3322 }
3323 // The stack limit is set separately under the ExecutionAccess lock.
3324}
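// FillJumpBuffer and LoadJumpBuffer are duals: Fill records {sp, fp, stack
// limit, resume pc} for the stack being left; Load installs sp/fp (and
// optionally jumps to the saved pc) for the stack being entered. A sketch in
// plain C++, with invented helper names:
//
//   struct JumpBufferModel { void* sp; void* fp; void* limit; void* pc; };
//   void Fill(JumpBufferModel* b, void* resume_pc) {
//     b->sp = CurrentSp(); b->fp = CurrentFp();
//     b->limit = CurrentStackLimit(); b->pc = resume_pc;
//   }
//   void Load(const JumpBufferModel* b, bool jump) {
//     SetSp(b->sp); SetFp(b->fp);
//     if (jump) JumpTo(b->pc);  // otherwise fall through on the new stack
//   }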
3325
3326void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
3327 wasm::JumpBuffer::StackState expected_state) {
3328 MemOperand GCScanSlotPlace =
3329 MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3330 __ Move(GCScanSlotPlace, Immediate(0));
3331 // Switch stack!
3332 LoadJumpBuffer(masm, target_stack, false, expected_state);
3333}
3334
3335// Updates the stack limit and central stack info, and validates the switch.
3336void SwitchStacks(MacroAssembler* masm, Register old_stack, bool return_switch,
3337 const std::initializer_list<Register> keep) {
3338 using ER = ExternalReference;
3339 for (auto reg : keep) {
3340 __ Push(reg);
3341 }
3342 FrameScope scope(masm, StackFrame::MANUAL);
3343 __ PrepareCallCFunction(2, eax);
3344 __ Move(Operand(esp, 0 * kSystemPointerSize),
3345 Immediate(ER::isolate_address(masm->isolate())));
3346 __ mov(Operand(esp, 1 * kSystemPointerSize), old_stack);
3347 __ CallCFunction(
3348 return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
3349 for (auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
3350 __ Pop(*it);
3351 }
3352}
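// SwitchStacks wraps a plain C call, so the live registers passed in {keep}
// are saved and restored by hand, with pops in reverse push order.
// Schematically (assumed C-side signature):
//
//   for (Register r : keep) Push(r);
//   wasm_switch_stacks(isolate, old_stack);   // or wasm_return_switch(...)
//   for (Register r : reverse(keep)) Pop(r);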
3353
3354void ReloadParentStack(MacroAssembler* masm, Register promise,
3355 Register return_value, Register context, Register tmp,
3356 Register tmp2) {
3357 Register active_stack = tmp;
3358 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3359
3360 DCHECK(!AreAliased(promise, return_value, context, tmp));
3361
3362 __ Push(promise);
3363
3364 // We don't need to save the full register state since we are switching out of
3365 // this stack for the last time. Mark the stack as retired.
3366 SwitchStackState(masm, active_stack, wasm::JumpBuffer::Active,
3367 wasm::JumpBuffer::Retired);
3368
3369 Register parent = tmp2;
3370 __ mov(parent, Operand(active_stack, wasm::kStackParentOffset));
3371
3372 // Update active stack root.
3373 __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
3374
3375 __ Pop(promise);
3376 // Switch stack!
3377 SwitchStacks(masm, active_stack, true,
3378 {promise, return_value, context, parent});
3379 LoadJumpBuffer(masm, parent, false, wasm::JumpBuffer::Inactive);
3380}
3381
3382// Loads the context field of the WasmTrustedInstanceData or WasmImportData
3383// depending on the data's type, and places the result in the input register.
3384void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
3385 Register scratch) {
3386 __ Move(scratch, FieldOperand(data, HeapObject::kMapOffset));
3387 __ CmpInstanceType(scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);
3388 Label instance;
3389 Label end;
3390 __ j(equal, &instance);
3391 __ Move(data, FieldOperand(data, WasmImportData::kNativeContextOffset));
3392 __ jmp(&end);
3393 __ bind(&instance);
3394 __ Move(data,
3395 FieldOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
3396 __ bind(&end);
3397}
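// Behavioral sketch of the dispatch above (hypothetical accessors, not the
// real object API): the implicit argument is either a WasmTrustedInstanceData
// or a WasmImportData, and both carry a native context at different offsets:
//
//   data = IsWasmTrustedInstanceData(data)
//              ? TrustedInstanceData(data)->native_context()
//              : ImportData(data)->native_context();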
3398
3399void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
3400 Register suspender = tmp1;
3401 __ LoadRoot(suspender, RootIndex::kActiveSuspender);
3402 __ Move(suspender,
3403 FieldOperand(suspender, WasmSuspenderObject::kParentOffset));
3404 __ CompareRoot(suspender, RootIndex::kUndefinedValue);
3405 __ mov(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
3406}
3407
3408void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
3409 __ mov(MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset),
3410 Immediate(0));
3411 __ mov(MemOperand(ebp, StackSwitchFrameConstants::kResultArrayOffset),
3412 Immediate(0));
3413}
3414
3415void SwitchToAllocatedStack(MacroAssembler* masm, Register wrapper_buffer,
3416 Register original_fp, Register new_wrapper_buffer,
3417 Register scratch, Register scratch2,
3418 Label* suspend) {
3419 ResetStackSwitchFrameStackSlots(masm);
3420 Register parent_stack = new_wrapper_buffer;
3421 __ LoadRootRelative(parent_stack, IsolateData::active_stack_offset());
3422 __ Move(parent_stack, Operand(parent_stack, wasm::kStackParentOffset));
3423 FillJumpBuffer(masm, parent_stack, scratch, suspend);
3424 SwitchStacks(masm, parent_stack, false, {wrapper_buffer});
3425 parent_stack = no_reg;
3426 Register target_stack = scratch;
3427 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3428 // Save the old stack's ebp, and use it to access the parameters in
3429 // the parent frame.
3430 __ mov(original_fp, ebp);
3431 LoadTargetJumpBuffer(masm, target_stack, wasm::JumpBuffer::Suspended);
3432 // Return address slot. The builtin itself returns by switching to the parent
3433 // jump buffer and does not actually use this slot, but it is read by the
3434 // profiler.
3435 __ Push(Immediate(0));
3436 // Push the loaded ebp. We know it is null, because there is no frame yet,
3437 // so we could also push 0 directly. In any case we need to push it, because
3438 // this marks the base of the stack segment for the stack frame iterator.
3439 __ EnterFrame(StackFrame::STACK_SWITCH);
3440 int stack_space =
3441 StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize +
3442 JSToWasmWrapperFrameConstants::kWrapperBufferSize;
3443 __ AllocateStackSpace(stack_space);
3444 __ AlignStackPointer();
3445 __ mov(new_wrapper_buffer, esp);
3446 // Copy data needed for return handling from old wrapper buffer to new one.
3447 __ mov(scratch,
3448 MemOperand(wrapper_buffer,
3449 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3450 __ mov(MemOperand(new_wrapper_buffer,
3451 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount),
3452 scratch);
3453 __ mov(
3454 scratch,
3455 MemOperand(wrapper_buffer,
3456 JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount));
3457 __ mov(
3458 MemOperand(new_wrapper_buffer,
3459 JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount),
3460 scratch);
3461 __ mov(
3462 scratch,
3463 MemOperand(
3464 wrapper_buffer,
3465 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3466 __ mov(
3467 MemOperand(
3468 new_wrapper_buffer,
3469 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray),
3470 scratch);
3471 __ mov(
3472 scratch,
3473 MemOperand(
3474 wrapper_buffer,
3475 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray +
3476 4));
3477 __ mov(
3478 MemOperand(
3479 new_wrapper_buffer,
3480 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray +
3481 4),
3482 scratch);
3483}
3484
3485void SwitchBackAndReturnPromise(MacroAssembler* masm, Register tmp,
3486 Register tmp2, wasm::Promise mode,
3487 Label* return_promise) {
3488 // The return value of the wasm function becomes the parameter of the
3489 // FulfillPromise builtin, and the promise is the return value of this
3490 // wrapper.
3491
3492 static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3493 static_assert(kReturnRegister0 == desc.GetRegisterParameter(0));
3494
3495 Register promise = desc.GetRegisterParameter(0);
3496 Register return_value = desc.GetRegisterParameter(1);
3497
3498 if (mode == wasm::kPromise) {
3499 __ mov(return_value, kReturnRegister0);
3500 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3501 __ Move(promise,
3502 FieldOperand(promise, WasmSuspenderObject::kPromiseOffset));
3503 }
3504 __ mov(kContextRegister,
3505 MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset));
3506 GetContextFromImplicitArg(masm, kContextRegister, tmp);
3507
3508 ReloadParentStack(masm, promise, return_value, kContextRegister, tmp, tmp2);
3509 RestoreParentSuspender(masm, tmp);
3510
3511 if (mode == wasm::kPromise) {
3512 __ Move(MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3513 Immediate(1));
3514 __ Push(promise);
3515 __ CallBuiltin(Builtin::kFulfillPromise);
3516 __ Pop(promise);
3517 }
3518
3519 __ bind(return_promise);
3520}
3521
3522void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3523 Label* return_promise) {
3524 int catch_handler = __ pc_offset();
3525
3526 // Restore esp to free the reserved stack slots for the sections.
3527 __ lea(esp, MemOperand(ebp, StackSwitchFrameConstants::kLastSpillOffset));
3528
3529 // Unset thread_in_wasm_flag.
3530 Register thread_in_wasm_flag_addr = ecx;
3531 __ mov(
3532 thread_in_wasm_flag_addr,
3533 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3534 __ mov(MemOperand(thread_in_wasm_flag_addr, 0), Immediate(0));
3535 thread_in_wasm_flag_addr = no_reg;
3536
3537 // The exception becomes the parameter of the RejectPromise builtin, and the
3538 // promise is the return value of this wrapper.
3539 static const Builtin_RejectPromise_InterfaceDescriptor desc;
3540 constexpr Register promise = desc.GetRegisterParameter(0);
3541 constexpr Register reason = desc.GetRegisterParameter(1);
3542 DCHECK(kReturnRegister0 == promise);
3543
3544 __ mov(reason, kReturnRegister0);
3545
3546 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3547 __ Move(promise, FieldOperand(promise, WasmSuspenderObject::kPromiseOffset));
3548
3549 __ mov(kContextRegister,
3550 MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset));
3551 constexpr Register tmp1 = edi;
3552 static_assert(tmp1 != promise && tmp1 != reason && tmp1 != kContextRegister);
3553 constexpr Register tmp2 = edx;
3554 static_assert(tmp2 != promise && tmp2 != reason && tmp2 != kContextRegister);
3555 GetContextFromImplicitArg(masm, kContextRegister, tmp1);
3556 ReloadParentStack(masm, promise, reason, kContextRegister, tmp1, tmp2);
3557 RestoreParentSuspender(masm, edi);
3558
3559 __ Move(MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3560 Immediate(1));
3561 __ Push(promise);
3562 Register debug_event = desc.GetRegisterParameter(2);
3563 __ LoadRoot(debug_event, RootIndex::kTrueValue);
3564 __ CallBuiltin(Builtin::kRejectPromise);
3565 __ Pop(promise);
3566
3567 // Run the rest of the wrapper normally (switch to the old stack,
3568 // deconstruct the frame, ...).
3569 __ jmp(return_promise);
3570
3571 masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
3572}
3573
3574void JSToWasmWrapperHelper(MacroAssembler* masm, wasm::Promise mode) {
3575 bool stack_switch = mode == wasm::kPromise || mode == wasm::kStressSwitch;
3576 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3577 : StackFrame::JS_TO_WASM);
3578
3579 constexpr int kNumSpillSlots = StackSwitchFrameConstants::kNumSpillSlots;
3580 __ sub(esp, Immediate(kNumSpillSlots * kSystemPointerSize));
3581
3582 ResetStackSwitchFrameStackSlots(masm);
3583
3584 Register wrapper_buffer =
3585 wasm::JSToWasmWrapperDescriptor::WrapperBufferRegister();
3586
3587 Register original_fp = stack_switch ? esi : ebp;
3588 Register new_wrapper_buffer = stack_switch ? ecx : wrapper_buffer;
3589
3590 Label suspend;
3591 if (stack_switch) {
3592 SwitchToAllocatedStack(masm, wrapper_buffer, original_fp,
3593 new_wrapper_buffer, eax, edx, &suspend);
3594 }
3595 __ mov(MemOperand(ebp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset),
3596 new_wrapper_buffer);
3597 if (stack_switch) {
3598 // Preserve wasm_instance across the switch.
3599 __ mov(eax, MemOperand(original_fp,
3600 JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3601 __ mov(MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset), eax);
3602
3603 Register result_array = eax;
3604 __ mov(result_array,
3605 MemOperand(original_fp,
3606 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3607 __ mov(MemOperand(ebp, StackSwitchFrameConstants::kResultArrayOffset),
3608 result_array);
3609 }
3610
3611 Register result_size = eax;
3612 original_fp = no_reg;
3613
3614 MemOperand GCScanSlotPlace =
3615 MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3616 __ Move(GCScanSlotPlace, Immediate(0));
3617
3618 __ mov(
3619 result_size,
3620 MemOperand(
3621 wrapper_buffer,
3622 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferSize));
3623 __ shl(result_size, kSystemPointerSizeLog2);
3624 __ sub(esp, result_size);
3625 __ mov(
3626 MemOperand(
3627 new_wrapper_buffer,
3628 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart),
3629 esp);
3630
3631 result_size = no_reg;
3632 new_wrapper_buffer = no_reg;
3633
3634 // params_start must not alias any of the parameter registers.
3635 Register params_start = eax;
3636 __ mov(params_start,
3637 MemOperand(wrapper_buffer,
3638 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3639 Register params_end = esi;
3640 __ mov(params_end,
3641 MemOperand(wrapper_buffer,
3642 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3643
3644 Register last_stack_param = ecx;
3645
3646 // The first GP parameter holds the trusted instance data or the import data.
3647 // This is handled specially.
3648 int stack_params_offset =
3649 (arraysize(wasm::kGpParamRegisters) - 1) * kSystemPointerSize +
3650 arraysize(wasm::kFpParamRegisters) * kDoubleSize;
3651
3652 int param_padding = stack_params_offset & kSystemPointerSize;
3653 stack_params_offset += param_padding;
3654 __ lea(last_stack_param, MemOperand(params_start, stack_params_offset));
3655
3656 Label loop_start;
3657 __ bind(&loop_start);
3658
3659 Label finish_stack_params;
3660 __ cmp(last_stack_param, params_end);
3661 __ j(greater_equal, &finish_stack_params);
3662
3663 // Push parameter
3664 __ sub(params_end, Immediate(kSystemPointerSize));
3665 __ push(MemOperand(params_end, 0));
3666 __ jmp(&loop_start);
3667
3668 __ bind(&finish_stack_params);
3669
3670 int next_offset = stack_params_offset;
3671 for (size_t i = arraysize(wasm::kFpParamRegisters) - 1;
3672 i < arraysize(wasm::kFpParamRegisters); --i) {
3673 next_offset -= kDoubleSize;
3674 __ Movsd(wasm::kFpParamRegisters[i], MemOperand(params_start, next_offset));
3675 }
3676
3677 // Set the thread-in-wasm flag before loading the parameter registers. There
3678 // are not so many registers, so we use one of the parameter registers before
3679 // it is blocked.
3680 Register thread_in_wasm_flag_addr = ecx;
3681 __ mov(
3682 thread_in_wasm_flag_addr,
3683 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3684 __ mov(MemOperand(thread_in_wasm_flag_addr, 0), Immediate(1));
3685
3686 next_offset -= param_padding;
3687 for (size_t i = arraysize(wasm::kGpParamRegisters) - 1; i > 0; --i) {
3688 next_offset -= kSystemPointerSize;
3689 __ mov(wasm::kGpParamRegisters[i], MemOperand(params_start, next_offset));
3690 }
3691 DCHECK_EQ(next_offset, 0);
3692 // Since there are so few registers, {params_start} overlaps with one of the
3693 // parameter registers. Make sure it overlaps with the last one we fill.
3694 DCHECK_EQ(params_start, wasm::kGpParamRegisters[1]);
3695
3696 // Load the implicit argument (instance data or import data) from the frame.
3697 if (stack_switch) {
3698 __ mov(kWasmImplicitArgRegister,
3699 MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset));
3700 } else {
3701 __ mov(kWasmImplicitArgRegister,
3702 MemOperand(ebp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3703 }
3704
3705 Register call_target = edi;
3706 __ mov(call_target,
3707 MemOperand(wrapper_buffer,
3708 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3709 if (stack_switch) {
3710 __ Move(MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3711 Immediate(0));
3712 }
3713 __ CallWasmCodePointer(call_target);
3714
3715 __ mov(
3716 thread_in_wasm_flag_addr,
3717 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3718 __ mov(MemOperand(thread_in_wasm_flag_addr, 0), Immediate(0));
3719 thread_in_wasm_flag_addr = no_reg;
3720
3721 wrapper_buffer = esi;
3722 __ mov(wrapper_buffer,
3723 MemOperand(ebp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3724
3725 __ Movsd(MemOperand(
3726 wrapper_buffer,
3727 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1),
3728 wasm::kFpReturnRegisters[0]);
3729 __ Movsd(MemOperand(
3730 wrapper_buffer,
3731 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2),
3732 wasm::kFpReturnRegisters[1]);
3733 __ mov(MemOperand(
3734 wrapper_buffer,
3735 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1),
3736 wasm::kGpReturnRegisters[0]);
3737 __ mov(MemOperand(
3738 wrapper_buffer,
3739 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2),
3740 wasm::kGpReturnRegisters[1]);
3741
3742 // Call the return value builtin with
3743 // eax: wasm instance.
3744 // ecx: the result JSArray for multi-return.
3745 // edx: pointer to the byte buffer which contains all parameters.
3746 if (stack_switch) {
3747 __ mov(eax, MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset));
3748 __ mov(ecx, MemOperand(ebp, StackSwitchFrameConstants::kResultArrayOffset));
3749 } else {
3750 __ mov(eax,
3751 MemOperand(ebp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3752 __ mov(ecx,
3753 MemOperand(ebp,
3754 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3755 }
3756 Register scratch = edx;
3757 GetContextFromImplicitArg(masm, eax, scratch);
3758 __ mov(edx, wrapper_buffer);
3759 __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
3760
3761 Label return_promise;
3762
3763 if (stack_switch) {
3764 SwitchBackAndReturnPromise(masm, edx, edi, mode, &return_promise);
3765 }
3766 __ bind(&suspend);
3767
3768 __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
3769 : StackFrame::JS_TO_WASM);
3770 __ ret(0);
3771
3772 // Catch handler for the stack-switching wrapper: reject the promise with the
3773 // thrown exception.
3774 if (mode == wasm::kPromise) {
3775 GenerateExceptionHandlingLandingPad(masm, &return_promise);
3776 }
3777}
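// Recap of the wrapper-buffer protocol used by the helper above, as a struct
// sketch (name and field grouping invented here; the authoritative layout is
// given by the JSToWasmWrapperFrameConstants offsets):
//
//   struct WrapperBufferModel {
//     void* param_start; void* param_end;  // parameter area, filled by caller
//     void* call_target;                   // wasm code pointer to invoke
//     uint32_t return_count, ref_return_count;  // copied when stack-switching
//     void* sig_representation_array;           // copied when stack-switching
//     double fp_ret[2];                    // raw FP returns, written post-call
//     uint32_t gp_ret[2];                  // raw GP returns, written post-call
//   };
//
// Builtin::kJSToWasmHandleReturns then converts the raw returns to JS values.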
3778} // namespace
3779
3780void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
3781 JSToWasmWrapperHelper(masm, wasm::kNoPromise);
3782}
3783
3784void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
3785 JSToWasmWrapperHelper(masm, wasm::kPromise);
3786}
3787
3788void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
3789 JSToWasmWrapperHelper(masm, wasm::kStressSwitch);
3790}
3791
3792void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3793 // Pop the return address into a scratch register and push it later again. The
3794 // return address has to be on top of the stack after all registers have been
3795 // pushed, so that the return instruction can find it.
3796 Register scratch = edi;
3797 __ pop(scratch);
3798
3799 int required_stack_space = arraysize(wasm::kFpParamRegisters) * kDoubleSize;
3800 __ sub(esp, Immediate(required_stack_space));
3801 for (int i = 0; i < static_cast<int>(arraysize(wasm::kFpParamRegisters));
3802 ++i) {
3803 __ Movsd(Operand(esp, i * kDoubleSize), wasm::kFpParamRegisters[i]);
3804 }
3805 // eax is pushed for alignment, so that the pushed register parameters and
3806 // stack parameters look the same as the layout produced by the js-to-wasm
3807 // wrapper for outgoing parameters. Having the same layout allows us to share
3808 // code in Torque, especially the `LocationAllocator`. eax has been picked
3809 // arbitrarily.
3810 __ push(eax);
3811 // Push the GP registers in reverse order so that they are on the stack like
3812 // in an array, with the first item being at the lowest address.
3813 for (size_t i = arraysize(wasm::kGpParamRegisters) - 1; i > 0; --i) {
3814 __ push(wasm::kGpParamRegisters[i]);
3815 }
3816 // Reserve a slot for the signature.
3817 __ push(eax);
3818 // Push the return address again.
3819 __ push(scratch);
3820 __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
3821}
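// After the pushes above the stack matches the layout the Torque
// LocationAllocator expects (lowest address first, esp at the top):
//
//   [ return address   ]  <-- esp
//   [ signature slot   ]  (eax placeholder)
//   [ gp_param[1..n-1] ]  first item at the lowest address
//   [ alignment slot   ]  (eax padding)
//   [ fp_param[0..m-1] ]  one kDoubleSize slot each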
3822
3823void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
3824 __ Trap();
3825}
3826
3827void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3828 // Set up the stackframe.
3829 __ EnterFrame(StackFrame::STACK_SWITCH);
3830
3831 Register suspender = eax;
3832
3833 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3834 kSystemPointerSize);
3835 // Set a sentinel value for the spill slots visited by the GC.
3836 ResetStackSwitchFrameStackSlots(masm);
3837
3838 // -------------------------------------------
3839 // Save current state in active jump buffer.
3840 // -------------------------------------------
3841 Label resume;
3842 Register stack = edx;
3843 __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3844 FillJumpBuffer(masm, stack, ecx, &resume);
3845 SwitchStackState(masm, stack, wasm::JumpBuffer::Active,
3846 wasm::JumpBuffer::Suspended);
3847
3848 Register suspender_stack = edi;
3849 __ Move(suspender_stack,
3850 FieldOperand(suspender, WasmSuspenderObject::kStackOffset));
3851#ifdef DEBUG
3852 // -------------------------------------------
3853 // Check that the suspender's stack is the active stack.
3854 // -------------------------------------------
3855 // TODO(thibaudm): Once we add core stack-switching instructions, this check
3856 // will not hold anymore: it's possible that the active stack changed
3857 // (due to an internal switch), so we have to update the suspender.
3858 __ cmp(suspender_stack, stack);
3859 Label ok;
3860 __ j(equal, &ok);
3861 __ Trap();
3862 __ bind(&ok);
3863#endif
3864
3865 // -------------------------------------------
3866 // Update roots.
3867 // -------------------------------------------
3868 Register caller = ecx;
3869 __ Move(caller, Operand(suspender_stack, wasm::kStackParentOffset));
3870 __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3871 Register parent = edi;
3872 __ Move(parent, FieldOperand(suspender, WasmSuspenderObject::kParentOffset));
3873 __ mov(masm->RootAsOperand(RootIndex::kActiveSuspender), parent);
3874 parent = no_reg;
3875
3876 // -------------------------------------------
3877 // Load jump buffer.
3878 // -------------------------------------------
3879 SwitchStacks(masm, stack, false, {caller, suspender});
3880 __ Move(kReturnRegister0,
3881 FieldOperand(suspender, WasmSuspenderObject::kPromiseOffset));
3882 MemOperand GCScanSlotPlace =
3883 MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3884 __ Move(GCScanSlotPlace, Immediate(0));
3885 LoadJumpBuffer(masm, caller, true, wasm::JumpBuffer::Inactive);
3886 __ Trap();
3887 __ bind(&resume);
3888 __ LeaveFrame(StackFrame::STACK_SWITCH);
3889 __ ret(0);
3890}
3891
3892namespace {
3893// Resume the suspender stored in the closure. We generate two variants of this
3894// builtin: the onFulfilled variant resumes execution at the saved PC and
3895// forwards the value, the onRejected variant throws the value.
3896
3897void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
3898 __ EnterFrame(StackFrame::STACK_SWITCH);
3899
3900 Register closure = kJSFunctionRegister; // edi
3901
3902 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3903 kSystemPointerSize);
3904 // Set a sentinel value for the spill slots visited by the GC.
3905 ResetStackSwitchFrameStackSlots(masm);
3906
3907 // -------------------------------------------
3908 // Load suspender from closure.
3909 // -------------------------------------------
3910 Register sfi = closure;
3911 __ Move(
3912 sfi,
3913 MemOperand(
3914 closure,
3915 wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction()));
3916 Register function_data = sfi;
3917 __ Move(function_data,
3918 FieldOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
3919 // The write barrier uses a fixed register for the host object (edi). The next
3920 // barrier is on the suspender, so load it in edi directly.
3921 Register suspender = edi;
3922 __ Move(suspender,
3923 FieldOperand(function_data, WasmResumeData::kSuspenderOffset));
3924 closure = no_reg;
3925 sfi = no_reg;
3926
3927 // -------------------------------------------
3928 // Save current state.
3929 // -------------------------------------------
3930
3931 Label suspend;
3932 Register active_stack = eax;
3933 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3934 FillJumpBuffer(masm, active_stack, edx, &suspend);
3935 SwitchStackState(masm, active_stack, wasm::JumpBuffer::Active,
3936 wasm::JumpBuffer::Inactive);
3937 active_stack = no_reg;
3938
3939 // -------------------------------------------
3940 // Set the suspender and continuation parents and update the roots.
3941 // -------------------------------------------
3942 Register active_suspender = edx;
3943 Register slot_address = WriteBarrierDescriptor::SlotAddressRegister();
3944 // Check that the fixed register isn't one that is already in use.
3945 DCHECK(!AreAliased(slot_address, suspender, active_suspender));
3946
3947 __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
3948 __ mov(FieldOperand(suspender, WasmSuspenderObject::kParentOffset),
3949 active_suspender);
3950 __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
3951 active_suspender, slot_address, SaveFPRegsMode::kIgnore);
3952 __ mov(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
3953
3954 active_suspender = no_reg;
3955
3956 Register target_stack = suspender;
3957 __ Move(target_stack,
3958 FieldOperand(suspender, WasmSuspenderObject::kStackOffset));
3959 suspender = no_reg;
3960 active_stack = edx;
3961 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3962 __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
3963 SwitchStacks(masm, active_stack, false, {target_stack});
3964
3965 // -------------------------------------------
3966 // Load state from target jmpbuf (longjmp).
3967 // -------------------------------------------
3968 // Move resolved value to return register.
3969 __ mov(kReturnRegister0, Operand(ebp, 3 * kSystemPointerSize));
3970 __ Move(MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3971 Immediate(0));
3972 if (on_resume == wasm::OnResume::kThrow) {
3973 // Switch without restoring the PC.
3974 LoadJumpBuffer(masm, target_stack, false, wasm::JumpBuffer::Suspended);
3975 // Pop this frame now. The unwinder expects that the first STACK_SWITCH
3976 // frame is the outermost one.
3977 __ LeaveFrame(StackFrame::STACK_SWITCH);
3978 // Forward the onRejected value to kThrow.
3979 __ push(kReturnRegister0);
3980 __ Move(kContextRegister, Smi::zero());
3981 __ CallRuntime(Runtime::kThrow);
3982 } else {
3983 // Resume the stack normally.
3984 LoadJumpBuffer(masm, target_stack, true, wasm::JumpBuffer::Suspended);
3985 }
3986 __ Trap();
3987 __ bind(&suspend);
3988 __ LeaveFrame(StackFrame::STACK_SWITCH);
3989 // Pop receiver + parameter.
3990 __ ret(2 * kSystemPointerSize);
3991}
3992} // namespace
3993
3994void Builtins::Generate_WasmResume(MacroAssembler* masm) {
3995 Generate_WasmResumeHelper(masm, wasm::OnResume::kContinue);
3996}
3997
3998void Builtins::Generate_WasmReject(MacroAssembler* masm) {
3999 Generate_WasmResumeHelper(masm, wasm::OnResume::kThrow);
4000}
4001
4002void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
4003 // Only needed on x64.
4004 __ Trap();
4005}
4006
4007namespace {
4008static constexpr Register kOldSPRegister = esi;
4009
4010void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, int edi_slot_index) {
4011 using ER = ExternalReference;
4012
4013 // Preserve edi on the stack as a local.
4014 __ mov(ExitFrameStackSlotOperand(edi_slot_index * kSystemPointerSize), edi);
4015
4016 // kOldSPRegister is used as a switch flag: if it is zero, no switch was
4017 // performed; if it is non-zero, it contains the old sp value.
4018 __ Move(kOldSPRegister, 0);
4019
4020 DCHECK(!AreAliased(kOldSPRegister, ecx, ebx));
4021
4022 ER on_central_stack_flag = ER::Create(
4023 IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
4024
4025 Label do_not_need_to_switch;
4026 __ cmpb(__ ExternalReferenceAsOperand(on_central_stack_flag, ecx),
4027 Immediate(0));
4028 __ j(not_zero, &do_not_need_to_switch);
4029
4030 // Perform switching to the central stack.
4031 __ mov(kOldSPRegister, esp);
4032
4033 Register argc_input = eax;
4034 Register central_stack_sp = edi;
4035 DCHECK(!AreAliased(central_stack_sp, argc_input));
4036 {
4037 FrameScope scope(masm, StackFrame::MANUAL);
4038 __ push(argc_input);
4039 __ push(kRuntimeCallFunctionRegister);
4040
4041 __ PrepareCallCFunction(2, ecx);
4042
4043 __ Move(Operand(esp, 0 * kSystemPointerSize),
4044 Immediate(ER::isolate_address()));
4045 __ mov(Operand(esp, 1 * kSystemPointerSize), kOldSPRegister);
4046
4047 __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
4048 SetIsolateDataSlots::kNo);
4049 __ mov(central_stack_sp, kReturnRegister0);
4050
4052 __ pop(argc_input);
4053 }
4054
4055 static constexpr int kReturnAddressSlotOffset = 4 * kSystemPointerSize;
4056 __ sub(central_stack_sp, Immediate(kReturnAddressSlotOffset));
4057 __ mov(esp, central_stack_sp);
4058
4059 // esp should be aligned to 16 bytes,
4060 // but that is not guaranteed for the stored SP.
4061 __ AlignStackPointer();
4062
4063 // Update the sp saved in the frame.
4064 // It will be used to calculate the callee pc during GC.
4065 // The pc is going to be on the new stack segment, so rewrite it here.
4066 __ mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
4067
4068 Label exitLabel;
4069 // Restore the clobbered edi, so we can make the C call properly.
4070 __ mov(edi, Operand(kOldSPRegister, edi_slot_index * kSystemPointerSize));
4071 __ jmp(&exitLabel);
4072 __ bind(&do_not_need_to_switch);
4073 __ mov(edi, ExitFrameStackSlotOperand(edi_slot_index * kSystemPointerSize));
4074
4075 __ bind(&exitLabel);
4076}
4077
4078void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
4079 using ER = ExternalReference;
4080
4081 Label no_stack_change;
4082 __ cmp(kOldSPRegister, Immediate(0));
4083 __ j(equal, &no_stack_change);
4084 __ mov(esp, kOldSPRegister);
4085
4086 {
4087 FrameScope scope(masm, StackFrame::MANUAL);
4088 __ push(kReturnRegister0);
4089 __ push(kReturnRegister1);
4090
4091 __ PrepareCallCFunction(1, ecx);
4092 __ Move(Operand(esp, 0 * kSystemPointerSize),
4093 Immediate(ER::isolate_address()));
4094 __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
4095 SetIsolateDataSlots::kNo);
4096
4097 __ pop(kReturnRegister1);
4098 __ pop(kReturnRegister0);
4099 }
4100
4101 __ bind(&no_stack_change);
4102}
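// kOldSPRegister doubles as the "did we switch?" flag for this helper pair:
// it is zeroed on entry and only receives the previous esp when a switch to
// the central stack actually happens, so the epilogue above is a no-op when
// the call already ran on the central stack. In pseudo-C++:
//
//   if (old_sp != 0) {                          // switched on entry
//     SetSp(old_sp);
//     wasm_switch_from_the_central_stack(isolate);
//   }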
4103
4104} // namespace
4105
4106#endif // V8_ENABLE_WEBASSEMBLY
4107
4108void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
4109 ArgvMode argv_mode, bool builtin_exit_frame,
4110 bool switch_to_central_stack) {
4111 CHECK(result_size == 1 || result_size == 2);
4112
4113 using ER = ExternalReference;
4114
4115 // eax: number of arguments including receiver
4116 // edx: pointer to C function
4117 // ebp: frame pointer (restored after C call)
4118 // esp: stack pointer (restored after C call)
4119 // esi: current context (C callee-saved)
4120 // edi: JS function of the caller (C callee-saved)
4121 //
4122 // If argv_mode == ArgvMode::kRegister:
4123 // ecx: pointer to the first argument
4124
4125 static_assert(eax == kRuntimeCallArgCountRegister);
4126 static_assert(ecx == kRuntimeCallArgvRegister);
4127 static_assert(edx == kRuntimeCallFunctionRegister);
4128 static_assert(esi == kContextRegister);
4129 static_assert(edi == kJSFunctionRegister);
4130
4134
4135 const int kSwitchToTheCentralStackSlots = switch_to_central_stack ? 1 : 0;
4136 const int kReservedStackSlots = 3 + kSwitchToTheCentralStackSlots;
4137
4138#if V8_ENABLE_WEBASSEMBLY
4139 const int kEdiSlot = kReservedStackSlots - 1;
4140#endif // V8_ENABLE_WEBASSEMBLY
4141
4142 __ EnterExitFrame(
4143 kReservedStackSlots,
4144 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT, edi);
4145
4146 // Set up argv in a callee-saved register. It is reused below so it must be
4147 // retained across the C call.
4148 static constexpr Register kArgvRegister = edi;
4149 if (argv_mode == ArgvMode::kRegister) {
4150 __ mov(kArgvRegister, ecx);
4151 } else {
4152 int offset =
4153 StandardFrameConstants::kFixedFrameSizeAboveFp - kReceiverOnStackSize;
4154 __ lea(kArgvRegister, Operand(ebp, eax, times_system_pointer_size, offset));
4155 }
4156
4157 // edx: pointer to C function
4158 // ebp: frame pointer (restored after C call)
4159 // esp: stack pointer (restored after C call)
4160 // eax: number of arguments including receiver
4161 // edi: pointer to the first argument (C callee-saved)
4162
4163#if V8_ENABLE_WEBASSEMBLY
4164 if (switch_to_central_stack) {
4165 SwitchToTheCentralStackIfNeeded(masm, kEdiSlot);
4166 }
4167#endif // V8_ENABLE_WEBASSEMBLY
4168 // Result returned in eax, or eax+edx if result size is 2.
4169
4170 // Check stack alignment.
4171 if (v8_flags.debug_code) {
4172 __ CheckStackAlignment();
4173 }
4174 // Call C function.
4175 __ mov(Operand(esp, 0 * kSystemPointerSize), eax); // argc.
4176 __ mov(Operand(esp, 1 * kSystemPointerSize), kArgvRegister); // argv.
4177 __ Move(ecx, Immediate(ER::isolate_address()));
4178 __ mov(Operand(esp, 2 * kSystemPointerSize), ecx);
4179 __ call(kRuntimeCallFunctionRegister);
4180
4181 // Result is in eax or edx:eax - do not destroy these registers!
4182
4183 // Check result for exception sentinel.
4184 Label exception_returned;
4185 __ CompareRoot(eax, RootIndex::kException);
4186 __ j(equal, &exception_returned);
4187
4188 // Check that there is no exception, otherwise we
4189 // should have returned the exception sentinel.
4190 if (v8_flags.debug_code) {
4191 __ push(edx);
4192 __ LoadRoot(edx, RootIndex::kTheHoleValue);
4193 Label okay;
4194 ER exception_address =
4195 ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4196 __ cmp(edx, __ ExternalReferenceAsOperand(exception_address, ecx));
4197 // Cannot use check here as it attempts to generate call into runtime.
4198 __ j(equal, &okay, Label::kNear);
4199 __ int3();
4200 __ bind(&okay);
4201 __ pop(edx);
4202 }
4203
4204#if V8_ENABLE_WEBASSEMBLY
4205 if (switch_to_central_stack) {
4206 SwitchFromTheCentralStackIfNeeded(masm);
4207 }
4208#endif // V8_ENABLE_WEBASSEMBLY
4209
4210 __ LeaveExitFrame(esi);
4211 if (argv_mode == ArgvMode::kStack) {
4212 // Drop arguments and the receiver from the caller stack.
4213 DCHECK(!AreAliased(esi, kArgvRegister));
4214 __ PopReturnAddressTo(ecx);
4215 __ lea(esp, Operand(kArgvRegister, kReceiverOnStackSize));
4216 __ PushReturnAddressFrom(ecx);
4217 }
4218 __ ret(0);
4219
4220 // Handling of exception.
4221 __ bind(&exception_returned);
4222
4223 ER pending_handler_context_address = ER::Create(
4224 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4225 ER pending_handler_entrypoint_address = ER::Create(
4226 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4227 ER pending_handler_fp_address =
4228 ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4229 ER pending_handler_sp_address =
4230 ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4231
4232 // Ask the runtime for help to determine the handler. This will set eax to
4233 // contain the current exception, don't clobber it.
4234 ER find_handler = ER::Create(Runtime::kUnwindAndFindExceptionHandler);
4235 {
4236 FrameScope scope(masm, StackFrame::MANUAL);
4237 __ PrepareCallCFunction(3, eax);
4238 __ mov(Operand(esp, 0 * kSystemPointerSize), Immediate(0)); // argc.
4239 __ mov(Operand(esp, 1 * kSystemPointerSize), Immediate(0)); // argv.
4240 __ Move(esi, Immediate(ER::isolate_address()));
4241 __ mov(Operand(esp, 2 * kSystemPointerSize), esi);
4242 __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo);
4243 }
4244
4245 // Retrieve the handler context, SP and FP.
4246 __ mov(esp, __ ExternalReferenceAsOperand(pending_handler_sp_address, esi));
4247 __ mov(ebp, __ ExternalReferenceAsOperand(pending_handler_fp_address, esi));
4248 __ mov(esi,
4249 __ ExternalReferenceAsOperand(pending_handler_context_address, esi));
4250
4251 // If the handler is a JS frame, restore the context to the frame. Note that
4252 // the context will be set to (esi == 0) for non-JS frames.
4253 Label skip;
4254 __ test(esi, esi);
4255 __ j(zero, &skip, Label::kNear);
4256 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
4257 __ bind(&skip);
4258
4259 // Clear c_entry_fp, like we do in `LeaveExitFrame`.
4260 ER c_entry_fp_address =
4261 ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4262 __ mov(__ ExternalReferenceAsOperand(c_entry_fp_address, esi), Immediate(0));
4263
4264 // Compute the handler entry address and jump to it.
4265 __ mov(edi, __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
4266 edi));
4267 __ jmp(edi);
4268}
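// The exception path above is effectively a longjmp: the runtime computes the
// handler's {context, sp, fp, entry point}, and the stub installs them without
// returning through the intervening frames. Schematically:
//
//   esp = *pending_handler_sp; ebp = *pending_handler_fp;
//   esi = *pending_handler_context;
//   if (esi != 0) frame->context = esi;   // only for JS frames
//   goto *pending_handler_entrypoint;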
4269
4270#if V8_ENABLE_WEBASSEMBLY
4271void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4272 using ER = ExternalReference;
4273 Register frame_base =
4274 WasmHandleStackOverflowDescriptor::FrameBaseRegister(); // eax
4275 Register gap = WasmHandleStackOverflowDescriptor::GapRegister(); // ecx
4276 Register original_fp = edx;
4277 Register original_sp = esi;
4278 __ mov(original_fp, ebp);
4279 __ mov(original_sp, esp);
4280 // Calculate frame size before SP is updated.
4281 __ sub(frame_base, esp);
4282 {
4283 Register scratch = edi;
4284 DCHECK(!AreAliased(original_fp, original_sp, frame_base, gap, scratch));
4285 FrameScope scope(masm, StackFrame::INTERNAL);
4286 __ push(gap);
4287 __ PrepareCallCFunction(5, scratch);
4288 __ mov(Operand(esp, 4 * kSystemPointerSize), original_fp);
4289 __ mov(Operand(esp, 3 * kSystemPointerSize), gap);
4290 __ mov(Operand(esp, 2 * kSystemPointerSize), frame_base);
4291 __ mov(Operand(esp, 1 * kSystemPointerSize), original_sp);
4292 __ Move(Operand(esp, 0 * kSystemPointerSize),
4293 Immediate(ER::isolate_address()));
4294 __ CallCFunction(ER::wasm_grow_stack(), 5);
4295 __ pop(gap);
4296 DCHECK_NE(kReturnRegister0, gap);
4297 }
4298 Label call_runtime;
4299 // wasm_grow_stack returns zero if it cannot grow a stack.
4300 __ test(kReturnRegister0, kReturnRegister0);
4301 __ j(zero, &call_runtime, Label::kNear);
4302 Register new_fp = edx;
4303 // Calculate old FP - SP offset to adjust FP accordingly to new SP.
4304 __ sub(ebp, esp);
4305 __ add(ebp, kReturnRegister0);
4306 __ mov(esp, kReturnRegister0);
4307 Register tmp = new_fp;
4308 __ mov(tmp,
4309 Immediate(StackFrame::TypeToMarker(StackFrame::WASM_SEGMENT_START)));
4310 __ mov(MemOperand(ebp, TypedFrameConstants::kFrameTypeOffset), tmp);
4311 __ ret(0);
4312
4313 // If wasm_grow_stack returns zero, the interruption or stack overflow
4314 // should be handled by a runtime call.
4315 {
4316 __ bind(&call_runtime);
4317 __ mov(kWasmImplicitArgRegister,
4318 MemOperand(ebp, WasmFrameConstants::kWasmInstanceDataOffset));
4319 __ mov(kContextRegister,
4320 FieldOperand(kWasmImplicitArgRegister,
4321 WasmTrustedInstanceData::kNativeContextOffset));
4322 FrameScope scope(masm, StackFrame::MANUAL);
4323 __ EnterFrame(StackFrame::INTERNAL);
4324 __ SmiTag(gap);
4325 __ push(gap);
4326 __ CallRuntime(Runtime::kWasmStackGuard);
4327 __ LeaveFrame(StackFrame::INTERNAL);
4328 __ ret(0);
4329 }
4330}
4331#endif // V8_ENABLE_WEBASSEMBLY
4332
4333void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
4334 Label check_negative, process_64_bits, done;
4335
4336 // Account for return address and saved regs.
4337 const int kArgumentOffset = 4 * kSystemPointerSize;
4338
4339 MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
4340 MemOperand exponent_operand(
4341 MemOperand(esp, kArgumentOffset + kDoubleSize / 2));
4342
4343 // The result is returned on the stack.
4344 MemOperand return_operand = mantissa_operand;
4345
4346 Register scratch1 = ebx;
4347
4348 // Since we must use ecx for shifts below, use some other register (eax)
4349 // to calculate the result.
4350 Register result_reg = eax;
4351 // Save ecx if it isn't the return register and therefore volatile, or if it
4352 // is the return register, then save the temp register we use in its stead for
4353 // the result.
4354 Register save_reg = eax;
4355 __ push(ecx);
4356 __ push(scratch1);
4357 __ push(save_reg);
4358
4359 __ mov(scratch1, mantissa_operand);
4360 if (CpuFeatures::IsSupported(SSE3)) {
4361 CpuFeatureScope scope(masm, SSE3);
4362 // Load x87 register with heap number.
4363 __ fld_d(mantissa_operand);
4364 }
4365 __ mov(ecx, exponent_operand);
4366
4367 __ and_(ecx, HeapNumber::kExponentMask);
4368 __ shr(ecx, HeapNumber::kExponentShift);
4369 __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
4370 __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
4371 __ j(below, &process_64_bits);
4372
4373 // Result is entirely in lower 32-bits of mantissa
4374 int delta =
4375 HeapNumber::kExponentBias + base::Double::kPhysicalSignificandSize;
4376 if (CpuFeatures::IsSupported(SSE3)) {
4377 __ fstp(0);
4378 }
4379 __ sub(ecx, Immediate(delta));
4380 __ xor_(result_reg, result_reg);
4381 __ cmp(ecx, Immediate(31));
4382 __ j(above, &done);
4383 __ shl_cl(scratch1);
4384 __ jmp(&check_negative);
4385
4386 __ bind(&process_64_bits);
4387 if (CpuFeatures::IsSupported(SSE3)) {
4388 CpuFeatureScope scope(masm, SSE3);
4389 // Reserve space for 64 bit answer.
4390 __ AllocateStackSpace(kDoubleSize); // Nolint.
4391 // Do conversion, which cannot fail because we checked the exponent.
4392 __ fisttp_d(Operand(esp, 0));
4393 __ mov(result_reg, Operand(esp, 0)); // Load low word of answer as result
4394 __ add(esp, Immediate(kDoubleSize));
4395 __ jmp(&done);
4396 } else {
4397 // Result must be extracted from shifted 32-bit mantissa
4398 __ sub(ecx, Immediate(delta));
4399 __ neg(ecx);
4400 __ mov(result_reg, exponent_operand);
4401 __ and_(
4402 result_reg,
4403 Immediate(static_cast<uint32_t>(base::Double::kSignificandMask >> 32)));
4404 __ add(result_reg,
4405 Immediate(static_cast<uint32_t>(base::Double::kHiddenBit >> 32)));
4406 __ shrd_cl(scratch1, result_reg);
4407 __ shr_cl(result_reg);
4408 __ test(ecx, Immediate(32));
4409 __ cmov(not_equal, scratch1, result_reg);
4410 }
4411
4412 // If the double was negative, negate the integer result.
4413 __ bind(&check_negative);
4414 __ mov(result_reg, scratch1);
4415 __ neg(result_reg);
4416 __ cmp(exponent_operand, Immediate(0));
4417 __ cmov(greater, result_reg, scratch1);
4418
4419 // Restore registers
4420 __ bind(&done);
4421 __ mov(return_operand, result_reg);
4422 __ pop(save_reg);
4423 __ pop(scratch1);
4424 __ pop(ecx);
4425 __ ret(0);
4426}
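// A standalone C++ model of what this stub computes (illustrative only;
// assumes <cstdint> and <cstring>): truncation of a double to int32 with
// JavaScript ToInt32 wrap-around semantics instead of C++ undefined behavior.
//
//   int32_t DoubleToInt32Model(double d) {
//     uint64_t bits;
//     std::memcpy(&bits, &d, sizeof bits);
//     int shift = static_cast<int>((bits >> 52) & 0x7FF) - 1075;
//     uint64_t mant = (bits & 0xFFFFFFFFFFFFFull) | (1ull << 52);
//     if (shift <= -53 || shift >= 32) return 0;   // all bits shifted out
//     uint64_t v = shift < 0 ? (mant >> -shift) : (mant << shift);
//     uint32_t u = static_cast<uint32_t>(v);
//     if (bits >> 63) u = 0u - u;                  // negate modulo 2^32
//     return static_cast<int32_t>(u);
//   }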
4427
4428void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm,
4429 CallApiCallbackMode mode) {
4430 // ----------- S t a t e -------------
4431 // CallApiCallbackMode::kOptimizedNoProfiling/kOptimized modes:
4432 // -- eax : api function address
4433 // Both modes:
4434 // -- ecx : arguments count (not including the receiver)
4435 // -- edx : FunctionTemplateInfo
4436 // -- esi : context
4437 // -- esp[0] : return address
4438 // -- esp[8] : argument 0 (receiver)
4439 // -- esp[16] : argument 1
4440 // -- ...
4441 // -- esp[argc * 8] : argument (argc - 1)
4442 // -- esp[(argc + 1) * 8] : argument argc
4443 // -----------------------------------
4444
4445 Register api_function_address = no_reg;
4446 Register argc = no_reg;
4447 Register func_templ = no_reg;
4448 Register topmost_script_having_context = no_reg;
4449 Register scratch = edi;
4450
4451 switch (mode) {
4452 case CallApiCallbackMode::kGeneric:
4453 argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4454 topmost_script_having_context = CallApiCallbackGenericDescriptor::
4455 TopmostScriptHavingContextRegister();
4456 func_templ =
4457 CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister();
4458 break;
4459
4460 case CallApiCallbackMode::kOptimizedNoProfiling:
4461 case CallApiCallbackMode::kOptimized:
4462 // Caller context is always equal to current context because we don't
4463 // inline Api calls cross-context.
4464 topmost_script_having_context = kContextRegister;
4465 api_function_address =
4466 CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4467 argc = CallApiCallbackOptimizedDescriptor::ActualArgumentsCountRegister();
4468 func_templ =
4469 CallApiCallbackOptimizedDescriptor::FunctionTemplateInfoRegister();
4470 break;
4471 }
4472 DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
4473 func_templ, scratch));
4474
4475 using FCA = FunctionCallbackArguments;
4476 using ER = ExternalReference;
4477 using FC = ApiCallbackExitFrameConstants;
4478
4479 static_assert(FCA::kArgsLength == 6);
4480 static_assert(FCA::kNewTargetIndex == 5);
4481 static_assert(FCA::kTargetIndex == 4);
4482 static_assert(FCA::kReturnValueIndex == 3);
4483 static_assert(FCA::kContextIndex == 2);
4484 static_assert(FCA::kIsolateIndex == 1);
4485 static_assert(FCA::kUnusedIndex == 0);
4486
4487 // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
4488 //
4489 // Current state:
4490 // esp[0]: return address
4491 //
4492 // Target state:
4493 // esp[0 * kSystemPointerSize]: return address
4494 // esp[1 * kSystemPointerSize]: kUnused <= FCA::implicit_args_
4495 // esp[2 * kSystemPointerSize]: kIsolate
4496 // esp[3 * kSystemPointerSize]: kContext
4497 // esp[4 * kSystemPointerSize]: undefined (kReturnValue)
4498 // esp[5 * kSystemPointerSize]: kTarget
4499 // esp[6 * kSystemPointerSize]: undefined (kNewTarget)
4500 // Existing state:
4501 // esp[7 * kSystemPointerSize]: <= FCA::values_
4502
4503 __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4504 topmost_script_having_context);
4505
4506 if (mode == CallApiCallbackMode::kGeneric) {
4507 api_function_address = ReassignRegister(topmost_script_having_context);
4508 }
4509
4510 // Park argc in xmm0.
4511 __ movd(xmm0, argc);
4512
4513 __ PopReturnAddressTo(argc);
4514 __ PushRoot(RootIndex::kUndefinedValue); // kNewTarget
4515 __ Push(func_templ); // kTarget
4516 __ PushRoot(RootIndex::kUndefinedValue); // kReturnValue
4517 __ Push(kContextRegister); // kContext
4518
4519 // TODO(ishell): Consider using LoadAddress+push approach here.
4520 __ Push(Immediate(ER::isolate_address()));
4521 __ PushRoot(RootIndex::kUndefinedValue); // kUnused
4522
4523 // The API function takes v8::FunctionCallbackInfo reference, allocate it
4524 // in non-GCed space of the exit frame.
4525 static constexpr int kApiArgc = 1;
4526 static constexpr int kApiArg0Offset = 0 * kSystemPointerSize;
4527
4528 if (mode == CallApiCallbackMode::kGeneric) {
4529 __ mov(api_function_address,
4530 FieldOperand(func_templ,
4531 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset));
4532 }
4533
4534 __ PushReturnAddressFrom(argc);
4535
4536 // The ApiCallbackExitFrame must be big enough to store the outgoing
4537 // parameters for C function on the stack.
4538 constexpr int extra_slots =
4539 FC::getExtraSlotsCountFrom<ExitFrameConstants>() + kApiArgc;
4540 __ EnterExitFrame(extra_slots, StackFrame::API_CALLBACK_EXIT,
4541 api_function_address);
4542
4543 if (v8_flags.debug_code) {
4544 __ mov(esi, Immediate(base::bit_cast<int32_t>(kZapValue)));
4545 }
4546
4547 // Reload argc from xmm0.
4548 __ movd(argc, xmm0);
4549
4550 Operand argc_operand = Operand(ebp, FC::kFCIArgcOffset);
4551 {
4552 ASM_CODE_COMMENT_STRING(masm, "Initialize v8::FunctionCallbackInfo");
4553 // FunctionCallbackInfo::length_.
4554 // TODO(ishell): pass JSParameterCount(argc) to simplify things on the
4555 // caller end.
4556 __ mov(argc_operand, argc);
4557
4558 // FunctionCallbackInfo::implicit_args_.
4559 __ lea(scratch, Operand(ebp, FC::kImplicitArgsArrayOffset));
4560 __ mov(Operand(ebp, FC::kFCIImplicitArgsOffset), scratch);
4561
4562 // FunctionCallbackInfo::values_ (points at JS arguments on the stack).
4563 __ lea(scratch, Operand(ebp, FC::kFirstArgumentOffset));
4564 __ mov(Operand(ebp, FC::kFCIValuesOffset), scratch);
4565 }
4566
4567 __ RecordComment("v8::FunctionCallback's argument.");
4568 __ lea(scratch, Operand(ebp, FC::kFunctionCallbackInfoOffset));
4569 __ mov(ExitFrameStackSlotOperand(kApiArg0Offset), scratch);
4570
4571 ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4572 Register no_thunk_arg = no_reg;
4573
4574 Operand return_value_operand = Operand(ebp, FC::kReturnValueOffset);
4575 static constexpr int kSlotsToDropOnReturn =
4576 FC::kFunctionCallbackInfoArgsLength + kJSArgcReceiverSlots;
4577
4578 const bool with_profiling =
4579 mode != CallApiCallbackMode::kOptimizedNoProfiling;
4580 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4581 thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4582 &argc_operand, return_value_operand);
4583}
4584
4585void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4586 // ----------- S t a t e -------------
4587 // -- esi : context
4588 // -- edx : receiver
4589 // -- ecx : holder
4590 // -- eax : accessor info
4591 // -- esp[0] : return address
4592 // -----------------------------------
4593
4594 Register receiver = ApiGetterDescriptor::ReceiverRegister();
4595 Register holder = ApiGetterDescriptor::HolderRegister();
4596 Register callback = ApiGetterDescriptor::CallbackRegister();
4597 Register scratch = edi;
4598 DCHECK(!AreAliased(receiver, holder, callback, scratch));
4599
4600 // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
4601 // name below the exit frame to make GC aware of them.
4602 using PCA = PropertyCallbackArguments;
4603 using ER = ExternalReference;
4604 using FC = ApiAccessorExitFrameConstants;
4605
4606 static_assert(PCA::kPropertyKeyIndex == 0);
4607 static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4608 static_assert(PCA::kHolderIndex == 2);
4609 static_assert(PCA::kIsolateIndex == 3);
4610 static_assert(PCA::kHolderV2Index == 4);
4611 static_assert(PCA::kReturnValueIndex == 5);
4612 static_assert(PCA::kDataIndex == 6);
4613 static_assert(PCA::kThisIndex == 7);
4614 static_assert(PCA::kArgsLength == 8);
4615
4616 // Set up v8::PropertyCallbackInfo's (PCI) args_ on the stack as follows:
4617 // Current state:
4618 // esp[0]: return address
4619 //
4620 // Target state:
4621 // esp[0 * kSystemPointerSize]: return address
4622 // esp[1 * kSystemPointerSize]: name <= PCI::args_
4623 // esp[2 * kSystemPointerSize]: kShouldThrowOnErrorIndex
4624 // esp[3 * kSystemPointerSize]: kHolderIndex
4625 // esp[4 * kSystemPointerSize]: kIsolateIndex
4626 // esp[5 * kSystemPointerSize]: kHolderV2Index
4627 // esp[6 * kSystemPointerSize]: kReturnValueIndex
4628 // esp[7 * kSystemPointerSize]: kDataIndex
4629 // esp[8 * kSystemPointerSize]: kThisIndex / receiver
4630
4631 __ PopReturnAddressTo(scratch);
4632 __ push(receiver);
4633 __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
4634 __ PushRoot(RootIndex::kUndefinedValue); // kReturnValue
4635 __ Push(Smi::zero()); // kHolderV2
4636 Register isolate_reg = ReassignRegister(receiver);
4637 __ LoadAddress(isolate_reg, ER::isolate_address());
4638 __ push(isolate_reg);
4639 __ push(holder);
4640 __ Push(Smi::FromInt(kDontThrow)); // should_throw_on_error -> kDontThrow
4641
4642 Register name = ReassignRegister(holder);
4643 __ mov(name, FieldOperand(callback, AccessorInfo::kNameOffset));
4644 __ push(name);
4645 __ PushReturnAddressFrom(scratch);
4646
4647 // The API function takes a name local handle and v8::PropertyCallbackInfo
4648 // reference, allocate them in non-GCed space of the exit frame.
4649 static constexpr int kApiArgc = 2;
4650 static constexpr int kApiArg0Offset = 0 * kSystemPointerSize;
4651 static constexpr int kApiArg1Offset = 1 * kSystemPointerSize;
4652
4653 Register api_function_address = ReassignRegister(isolate_reg);
4654 __ RecordComment("Load function_address");
4655 __ mov(api_function_address,
4656 FieldOperand(callback, AccessorInfo::kMaybeRedirectedGetterOffset));
4657
4658 __ EnterExitFrame(FC::getExtraSlotsCountFrom<ExitFrameConstants>() + kApiArgc,
4659 StackFrame::API_ACCESSOR_EXIT, api_function_address);
4660 if (v8_flags.debug_code) {
4661 __ mov(esi, Immediate(base::bit_cast<int32_t>(kZapValue)));
4662 }
4663
4664 __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
4665 // property_callback_info_arg = v8::PropertyCallbackInfo&
4666 Register property_callback_info_arg = ReassignRegister(scratch);
4667 __ lea(property_callback_info_arg, Operand(ebp, FC::kArgsArrayOffset));
4668
4669 DCHECK(!AreAliased(api_function_address, property_callback_info_arg, name,
4670 callback));
4671
4672 __ RecordComment("Local<Name>");
4673#ifdef V8_ENABLE_DIRECT_HANDLE
4674 // name_arg = Local<Name>(name), name value was pushed to GC-ed stack space.
4675 __ mov(ExitFrameStackSlotOperand(kApiArg0Offset), name);
4676#else
4677 // name_arg = Local<Name>(&name), which is &args_array[kPropertyKeyIndex].
4678 static_assert(PCA::kPropertyKeyIndex == 0);
4679 __ mov(ExitFrameStackSlotOperand(kApiArg0Offset), property_callback_info_arg);
4680#endif
4681
4682 __ RecordComment("v8::PropertyCallbackInfo<T>&");
4683 __ mov(ExitFrameStackSlotOperand(kApiArg1Offset), property_callback_info_arg);
4684
4685 ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
4686 // Pass AccessorInfo to thunk wrapper in case profiler or side-effect
4687 // checking is enabled.
4688 Register thunk_arg = callback;
4689
4690 Operand return_value_operand = Operand(ebp, FC::kReturnValueOffset);
4691 static constexpr int kSlotsToDropOnReturn =
4692 FC::kPropertyCallbackInfoArgsLength;
4693 Operand* const kUseStackSpaceConstant = nullptr;
4694
4695 const bool with_profiling = true;
4696 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4697 thunk_ref, thunk_arg, kSlotsToDropOnReturn,
4698 kUseStackSpaceConstant, return_value_operand);
4699}
4700
4701void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
4702 __ int3(); // Unused on this architecture.
4703}
4704
4705namespace {
4706
4707enum Direction { FORWARD, BACKWARD };
4708enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
4709
4710// Expects registers:
4711// esi - source, aligned if alignment == ALIGNED
4712// edi - destination, always aligned
4713// ecx - count (copy size in bytes)
4714// edx - loop count (number of 64 byte chunks)
4715void MemMoveEmitMainLoop(MacroAssembler* masm, Label* move_last_15,
4716 Direction direction, Alignment alignment) {
4717 ASM_CODE_COMMENT(masm);
4718 Register src = esi;
4719 Register dst = edi;
4720 Register count = ecx;
4721 Register loop_count = edx;
4722 Label loop, move_last_31, move_last_63;
4723 __ cmp(loop_count, 0);
4724 __ j(equal, &move_last_63);
4725 __ bind(&loop);
4726 // Main loop. Copy in 64 byte chunks.
4727 if (direction == BACKWARD) __ sub(src, Immediate(0x40));
4728 __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
4729 __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
4730 __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
4731 __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
4732 if (direction == FORWARD) __ add(src, Immediate(0x40));
4733 if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
4734 __ movdqa(Operand(dst, 0x00), xmm0);
4735 __ movdqa(Operand(dst, 0x10), xmm1);
4736 __ movdqa(Operand(dst, 0x20), xmm2);
4737 __ movdqa(Operand(dst, 0x30), xmm3);
4738 if (direction == FORWARD) __ add(dst, Immediate(0x40));
4739 __ dec(loop_count);
4740 __ j(not_zero, &loop);
4741 // At most 63 bytes left to copy.
4742 __ bind(&move_last_63);
4743 __ test(count, Immediate(0x20));
4744 __ j(zero, &move_last_31);
4745 if (direction == BACKWARD) __ sub(src, Immediate(0x20));
4746 __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
4747 __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
4748 if (direction == FORWARD) __ add(src, Immediate(0x20));
4749 if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
4750 __ movdqa(Operand(dst, 0x00), xmm0);
4751 __ movdqa(Operand(dst, 0x10), xmm1);
4752 if (direction == FORWARD) __ add(dst, Immediate(0x20));
4753 // At most 31 bytes left to copy.
4754 __ bind(&move_last_31);
4755 __ test(count, Immediate(0x10));
4756 __ j(zero, move_last_15);
4757 if (direction == BACKWARD) __ sub(src, Immediate(0x10));
4758 __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
4759 if (direction == FORWARD) __ add(src, Immediate(0x10));
4760 if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
4761 __ movdqa(Operand(dst, 0), xmm0);
4762 if (direction == FORWARD) __ add(dst, Immediate(0x10));
4763}
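// The emitted kernel is the classic SSE2 memmove inner loop: 64 bytes per
// iteration through xmm0-xmm3, then at most 63 trailing bytes in 32- and
// 16-byte steps, leaving <=15 bytes for the caller's move_last_15 handler.
// A scalar C++ model of the FORWARD main loop (illustrative only):
//
//   while (loop_count-- != 0) {
//     std::memcpy(dst, src, 64);   // four 16-byte SSE loads + stores
//     src += 64; dst += 64;
//   }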
4764
4765void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
4766 __ pop(esi);
4767 __ pop(edi);
4768 __ ret(0);
4769}
4770
4771} // namespace
4772
4773void Builtins::Generate_MemMove(MacroAssembler* masm) {
4774 // Generated code is put into a fixed, unmovable buffer, and not into
4775 // the V8 heap. We can't, and don't, refer to any relocatable addresses
4776 // (e.g. the JavaScript NaN object).
4777
4778 // 32-bit C declaration function calls pass arguments on stack.
4779
4780 // Stack layout:
4781 // esp[12]: Third argument, size.
4782 // esp[8]: Second argument, source pointer.
4783 // esp[4]: First argument, destination pointer.
4784 // esp[0]: return address
4785
4786 const int kDestinationOffset = 1 * kSystemPointerSize;
4787 const int kSourceOffset = 2 * kSystemPointerSize;
4788 const int kSizeOffset = 3 * kSystemPointerSize;
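
// In C terms the builtin implements a plain cdecl function with the signature
// below; the offsets above locate its arguments relative to esp on entry
// (sketch for illustration, not a declaration taken from this file):
//
//   void MemMove(void* dest, const void* source, size_t size);
//   //   dest   <- esp[4]   (kDestinationOffset)
//   //   source <- esp[8]   (kSourceOffset)
//   //   size   <- esp[12]  (kSizeOffset)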
4789
4790 // When copying up to this many bytes, use special "small" handlers.
4791 const size_t kSmallCopySize = 8;
4792 // When copying up to this many bytes, use special "medium" handlers.
4793 const size_t kMediumCopySize = 63;
4794 // When the non-overlapping region of src and dst is smaller than this,
4795 // use a more careful implementation (slightly slower).
4796 const size_t kMinMoveDistance = 16;
4797 // Note that these values are dictated by the implementation below,
4798 // do not just change them and hope things will work!
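//
// A minimal sketch of the resulting size-class dispatch, assuming the
// constants above (illustrative only; the real code also returns early
// when dst == src):
//
//   if (size <= kSmallCopySize) goto small_size;         // 0..8 bytes
//   else if (size <= kMediumCopySize) goto medium_size;  // 9..63 bytes
//   else if (dst > src) goto backward;                   // copy back-to-front
//   else /* fall through */;                             // copy front-to-back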
4799
4800 int stack_offset = 0; // Update if we change the stack height.
4801
4802 Label backward, backward_much_overlap;
4803 Label forward_much_overlap, small_size, medium_size, pop_and_return;
4804 __ push(edi);
4805 __ push(esi);
4806 stack_offset += 2 * kSystemPointerSize;
4807 Register dst = edi;
4808 Register src = esi;
4809 Register count = ecx;
4810 Register loop_count = edx;
4811 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
4812 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
4813 __ mov(count, Operand(esp, stack_offset + kSizeOffset));
4814
4815 __ cmp(dst, src);
4816 __ j(equal, &pop_and_return);
4817
4818 __ prefetch(Operand(src, 0), 1);
4819 __ cmp(count, kSmallCopySize);
4820 __ j(below_equal, &small_size);
4821 __ cmp(count, kMediumCopySize);
4822 __ j(below_equal, &medium_size);
4823 __ cmp(dst, src);
4824 __ j(above, &backward);
4825
4826 {
4827 // |dst| is a lower address than |src|. Copy front-to-back.
4828 Label unaligned_source, move_last_15, skip_last_move;
4829 __ mov(eax, src);
4830 __ sub(eax, dst);
4831 __ cmp(eax, kMinMoveDistance);
4832 __ j(below, &forward_much_overlap);
4833 // Copy first 16 bytes.
4834 __ movdqu(xmm0, Operand(src, 0));
4835 __ movdqu(Operand(dst, 0), xmm0);
4836 // Determine distance to alignment: 16 - (dst & 0xF).
4837 __ mov(edx, dst);
4838 __ and_(edx, 0xF);
4839 __ neg(edx);
4840 __ add(edx, Immediate(16));
4841 __ add(dst, edx);
4842 __ add(src, edx);
4843 __ sub(count, edx);
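// Worked example: if dst == 0x1003, then edx = 16 - (0x1003 & 0xF) = 13, so
// dst advances to the 16-byte boundary 0x1010; offsets 13..15 were already
// written by the 16-byte copy above and are harmlessly copied again.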
4844 // dst is now aligned. Main copy loop.
4845 __ mov(loop_count, count);
4846 __ shr(loop_count, 6);
4847 // Check if src is also aligned.
4848 __ test(src, Immediate(0xF));
4849 __ j(not_zero, &unaligned_source);
4850 // Copy loop for aligned source and destination.
4851 MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_ALIGNED);
4852 // At most 15 bytes left to copy. Copy the last 16 bytes of the region.
4853 __ bind(&move_last_15);
4854 __ and_(count, 0xF);
4855 __ j(zero, &skip_last_move, Label::kNear);
4856 __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
4857 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
4858 __ bind(&skip_last_move);
4859 MemMoveEmitPopAndReturn(masm);
4860
4861 // Copy loop for unaligned source and aligned destination.
4862 __ bind(&unaligned_source);
4863 MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
4864 __ jmp(&move_last_15);
4865
4866 // Less than kMinMoveDistance offset between dst and src.
4867 Label loop_until_aligned, last_15_much_overlap;
4868 __ bind(&loop_until_aligned);
4869 __ mov_b(eax, Operand(src, 0));
4870 __ inc(src);
4871 __ mov_b(Operand(dst, 0), eax);
4872 __ inc(dst);
4873 __ dec(count);
4874 __ bind(&forward_much_overlap); // Entry point into this block.
4875 __ test(dst, Immediate(0xF));
4876 __ j(not_zero, &loop_until_aligned);
4877 // dst is now aligned, src can't be. Main copy loop.
4878 __ mov(loop_count, count);
4879 __ shr(loop_count, 6);
4880 MemMoveEmitMainLoop(masm, &last_15_much_overlap, FORWARD, MOVE_UNALIGNED);
4881 __ bind(&last_15_much_overlap);
4882 __ and_(count, 0xF);
4883 __ j(zero, &pop_and_return);
4884 __ cmp(count, kSmallCopySize);
4885 __ j(below_equal, &small_size);
4886 __ jmp(&medium_size);
4887 }
4888
4889 {
4890 // |dst| is a higher address than |src|. Copy backwards.
4891 Label unaligned_source, move_first_15, skip_last_move;
4892 __ bind(&backward);
4893 // |dst| and |src| always point to the end of what's left to copy.
4894 __ add(dst, count);
4895 __ add(src, count);
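// Example: with dst == 0x2000, src == 0x1000, count == 0x100, dst and src
// now point one past the end of their regions (0x2100 and 0x1100); all moves
// below decrement before copying.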
4896 __ mov(eax, dst);
4897 __ sub(eax, src);
4898 __ cmp(eax, kMinMoveDistance);
4899 __ j(below, &backward_much_overlap);
4900 // Copy last 16 bytes.
4901 __ movdqu(xmm0, Operand(src, -0x10));
4902 __ movdqu(Operand(dst, -0x10), xmm0);
4903 // Find distance to alignment: dst & 0xF
4904 __ mov(edx, dst);
4905 __ and_(edx, 0xF);
4906 __ sub(dst, edx);
4907 __ sub(src, edx);
4908 __ sub(count, edx);
4909 // dst is now aligned. Main copy loop.
4910 __ mov(loop_count, count);
4911 __ shr(loop_count, 6);
4912 // Check if src is also aligned.
4913 __ test(src, Immediate(0xF));
4914 __ j(not_zero, &unaligned_source);
4915 // Copy loop for aligned source and destination.
4916 MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
4917 // At most 15 bytes left to copy. Copy the first 16 bytes of the region.
4918 __ bind(&move_first_15);
4919 __ and_(count, 0xF);
4920 __ j(zero, &skip_last_move, Label::kNear);
4921 __ sub(src, count);
4922 __ sub(dst, count);
4923 __ movdqu(xmm0, Operand(src, 0));
4924 __ movdqu(Operand(dst, 0), xmm0);
4925 __ bind(&skip_last_move);
4926 MemMoveEmitPopAndReturn(masm);
4927
4928 // Copy loop for unaligned source and aligned destination.
4929 __ bind(&unaligned_source);
4930 MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
4931 __ jmp(&move_first_15);
4932
4933 // Less than kMinMoveDistance offset between dst and src.
4934 Label loop_until_aligned, first_15_much_overlap;
4935 __ bind(&loop_until_aligned);
4936 __ dec(src);
4937 __ dec(dst);
4938 __ mov_b(eax, Operand(src, 0));
4939 __ mov_b(Operand(dst, 0), eax);
4940 __ dec(count);
4941 __ bind(&backward_much_overlap); // Entry point into this block.
4942 __ test(dst, Immediate(0xF));
4943 __ j(not_zero, &loop_until_aligned);
4944 // dst is now aligned, src can't be. Main copy loop.
4945 __ mov(loop_count, count);
4946 __ shr(loop_count, 6);
4947 MemMoveEmitMainLoop(masm, &first_15_much_overlap, BACKWARD, MOVE_UNALIGNED);
4948 __ bind(&first_15_much_overlap);
4949 __ and_(count, 0xF);
4950 __ j(zero, &pop_and_return);
4951 // Small/medium handlers expect dst/src to point to the beginning.
4952 __ sub(dst, count);
4953 __ sub(src, count);
4954 __ cmp(count, kSmallCopySize);
4955 __ j(below_equal, &small_size);
4956 __ jmp(&medium_size);
4957 }
4958 {
4959 // Special handlers for 9 <= copy_size < 64. No assumptions about
4960 // alignment or move distance, so all reads must be unaligned and
4961 // must happen before any writes.
4962 Label f9_16, f17_32, f33_48, f49_63;
4963
4964 __ bind(&f9_16);
4965 __ movsd(xmm0, Operand(src, 0));
4966 __ movsd(xmm1, Operand(src, count, times_1, -8));
4967 __ movsd(Operand(dst, 0), xmm0);
4968 __ movsd(Operand(dst, count, times_1, -8), xmm1);
4969 MemMoveEmitPopAndReturn(masm);
4970
4971 __ bind(&f17_32);
4972 __ movdqu(xmm0, Operand(src, 0));
4973 __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
4974 __ movdqu(Operand(dst, 0x00), xmm0);
4975 __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
4976 MemMoveEmitPopAndReturn(masm);
4977
4978 __ bind(&f33_48);
4979 __ movdqu(xmm0, Operand(src, 0x00));
4980 __ movdqu(xmm1, Operand(src, 0x10));
4981 __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
4982 __ movdqu(Operand(dst, 0x00), xmm0);
4983 __ movdqu(Operand(dst, 0x10), xmm1);
4984 __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
4985 MemMoveEmitPopAndReturn(masm);
4986
4987 __ bind(&f49_63);
4988 __ movdqu(xmm0, Operand(src, 0x00));
4989 __ movdqu(xmm1, Operand(src, 0x10));
4990 __ movdqu(xmm2, Operand(src, 0x20));
4991 __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
4992 __ movdqu(Operand(dst, 0x00), xmm0);
4993 __ movdqu(Operand(dst, 0x10), xmm1);
4994 __ movdqu(Operand(dst, 0x20), xmm2);
4995 __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
4996 MemMoveEmitPopAndReturn(masm);
4997
4998 __ bind(&medium_size); // Entry point into this block.
4999 __ mov(eax, count);
5000 __ dec(eax);
5001 __ shr(eax, 4);
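// eax now holds (count - 1) >> 4, which indexes the handlers:
//   count 9..16  -> 0 (f9_16),  17..32 -> 1 (f17_32),
//   count 33..48 -> 2 (f33_48), 49..63 -> 3 (f49_63).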
5002 if (v8_flags.debug_code) {
5003 Label ok;
5004 __ cmp(eax, 3);
5005 __ j(below_equal, &ok);
5006 __ int3();
5007 __ bind(&ok);
5008 }
5009
5010 // Dispatch to handlers.
5011 Label eax_is_2_or_3;
5012
5013 __ cmp(eax, 1);
5014 __ j(greater, &eax_is_2_or_3);
5015 __ j(less, &f9_16); // eax == 0.
5016 __ jmp(&f17_32); // eax == 1.
5017
5018 __ bind(&eax_is_2_or_3);
5019 __ cmp(eax, 3);
5020 __ j(less, &f33_48); // eax == 2.
5021 __ jmp(&f49_63); // eax == 3.
5022 }
5023 {
5024 // Specialized copiers for copy_size <= 8 bytes.
5025 Label f0, f1, f2, f3, f4, f5_8;
5026 __ bind(&f0);
5027 MemMoveEmitPopAndReturn(masm);
5028
5029 __ bind(&f1);
5030 __ mov_b(eax, Operand(src, 0));
5031 __ mov_b(Operand(dst, 0), eax);
5032 MemMoveEmitPopAndReturn(masm);
5033
5034 __ bind(&f2);
5035 __ mov_w(eax, Operand(src, 0));
5036 __ mov_w(Operand(dst, 0), eax);
5037 MemMoveEmitPopAndReturn(masm);
5038
5039 __ bind(&f3);
5040 __ mov_w(eax, Operand(src, 0));
5041 __ mov_b(edx, Operand(src, 2));
5042 __ mov_w(Operand(dst, 0), eax);
5043 __ mov_b(Operand(dst, 2), edx);
5044 MemMoveEmitPopAndReturn(masm);
5045
5046 __ bind(&f4);
5047 __ mov(eax, Operand(src, 0));
5048 __ mov(Operand(dst, 0), eax);
5049 MemMoveEmitPopAndReturn(masm);
5050
5051 __ bind(&f5_8);
5052 __ mov(eax, Operand(src, 0));
5053 __ mov(edx, Operand(src, count, times_1, -4));
5054 __ mov(Operand(dst, 0), eax);
5055 __ mov(Operand(dst, count, times_1, -4), edx);
5056 MemMoveEmitPopAndReturn(masm);
5057
5058 __ bind(&small_size); // Entry point into this block.
5059 if (v8_flags.debug_code) {
5060 Label ok;
5061 __ cmp(count, 8);
5062 __ j(below_equal, &ok);
5063 __ int3();
5064 __ bind(&ok);
5065 }
5066
5067 // Dispatch to handlers.
5068 Label count_is_above_3, count_is_2_or_3;
5069
5070 __ cmp(count, 3);
5071 __ j(greater, &count_is_above_3);
5072
5073 __ cmp(count, 1);
5074 __ j(greater, &count_is_2_or_3);
5075 __ j(less, &f0); // count == 0.
5076 __ jmp(&f1); // count == 1.
5077
5078 __ bind(&count_is_2_or_3);
5079 __ cmp(count, 3);
5080 __ j(less, &f2); // count == 2.
5081 __ jmp(&f3); // count == 3.
5082
5083 __ bind(&count_is_above_3);
5084 __ cmp(count, 5);
5085 __ j(less, &f4); // count == 4.
5086 __ jmp(&f5_8); // count in [5, 8].
5087 }
5088
5089 __ bind(&pop_and_return);
5090 MemMoveEmitPopAndReturn(masm);
5091}
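
// Hedged usage sketch: on ia32, V8's C++ MemMove helper dispatches through a
// function pointer bound to this builtin's entry point, so a call site looks
// like an ordinary cdecl call (the typedef mirrors the one V8 declares for
// ia32; the initialization shown is illustrative):
//
//   using MemMoveFunction = void (*)(void* dest, const void* src, size_t n);
//   MemMoveFunction mem_move = /* entry point of Builtin::kMemMove code */;
//   mem_move(dst, src, byte_count);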
5092
5093namespace {
5094
5095void Generate_DeoptimizationEntry(MacroAssembler* masm,
5096 DeoptimizeKind deopt_kind) {
5097 Isolate* isolate = masm->isolate();
5098
5099 // Save all general purpose registers before messing with them.
5100 const int kNumberOfRegisters = Register::kNumRegisters;
5101
5102 const int kXmmRegsSize = kSimd128Size * XMMRegister::kNumRegisters;
5103 __ AllocateStackSpace(kXmmRegsSize);
5104 const RegisterConfiguration* config = RegisterConfiguration::Default();
5105 DCHECK_GE(XMMRegister::kNumRegisters,
5106 config->num_allocatable_simd128_registers());
5107 DCHECK_EQ(config->num_allocatable_simd128_registers(),
5108 config->num_allocatable_double_registers());
5109 for (int i = 0; i < config->num_allocatable_simd128_registers(); ++i) {
5110 int code = config->GetAllocatableSimd128Code(i);
5111 XMMRegister xmm_reg = XMMRegister::from_code(code);
5112 int offset = code * kSimd128Size;
5113 __ movdqu(Operand(esp, offset), xmm_reg);
5114 }
5115
5116 __ pushad();
5117
5118 ExternalReference c_entry_fp_address =
5119 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate);
5120 __ mov(masm->ExternalReferenceAsOperand(c_entry_fp_address, esi), ebp);
5121
5122 const int kSavedRegistersAreaSize =
5123 kNumberOfRegisters * kSystemPointerSize + kXmmRegsSize;
5124
5125 // Get the address of the location in the code object (into ecx)
5126 // and compute the fp-to-sp delta in register edx.
5127 __ mov(ecx, Operand(esp, kSavedRegistersAreaSize));
5128 __ lea(edx, Operand(esp, kSavedRegistersAreaSize + 1 * kSystemPointerSize));
5129
5130 __ sub(edx, ebp);
5131 __ neg(edx);
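// edx now holds ebp - sp_at_deopt, where sp_at_deopt is esp as it was before
// the return address and the saved registers were pushed, i.e. the fp-to-sp
// delta the deoptimizer expects.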
5132
5133 // Allocate a new deoptimizer object.
5134 __ PrepareCallCFunction(5, eax);
5135 __ mov(eax, Immediate(0));
5136 Label context_check;
5137 __ mov(edi, Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset));
5138 __ JumpIfSmi(edi, &context_check);
5139 __ mov(eax, Operand(ebp, StandardFrameConstants::kFunctionOffset));
5140 __ bind(&context_check);
5141 __ mov(Operand(esp, 0 * kSystemPointerSize), eax); // Function.
5142 __ mov(Operand(esp, 1 * kSystemPointerSize),
5143 Immediate(static_cast<int>(deopt_kind)));
5144 __ mov(Operand(esp, 2 * kSystemPointerSize),
5145 ecx); // InstructionStream address or 0.
5146 __ mov(Operand(esp, 3 * kSystemPointerSize), edx); // Fp-to-sp delta.
5147 __ Move(Operand(esp, 4 * kSystemPointerSize),
5148 Immediate(ExternalReference::isolate_address()));
5149 {
5150 AllowExternalCallThatCantCauseGC scope(masm);
5151 __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
5152 }
5153
5154 // Preserve deoptimizer object in register eax and get the input
5155 // frame descriptor pointer.
5156 __ mov(esi, Operand(eax, Deoptimizer::input_offset()));
5157
5158 // Fill in the input registers.
5159 for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
5160 int offset =
5161 (i * kSystemPointerSize) + FrameDescription::registers_offset();
5162 __ pop(Operand(esi, offset));
5163 }
5164
5165 int simd128_regs_offset = FrameDescription::simd128_registers_offset();
5166 // Fill in the xmm (simd128 / double) input registers.
5167 for (int i = 0; i < config->num_allocatable_simd128_registers(); ++i) {
5168 int code = config->GetAllocatableSimd128Code(i);
5169 int dst_offset = code * kSimd128Size + simd128_regs_offset;
5170 int src_offset = code * kSimd128Size;
5171 __ movdqu(xmm0, Operand(esp, src_offset));
5172 __ movdqu(Operand(esi, dst_offset), xmm0);
5173 }
5174
5175 // Clear all FPU exceptions.
5176 // TODO(ulan): Find out why the TOP register is not zero here in some cases,
5177 // and check that the generated code never deoptimizes with unbalanced stack.
5178 __ fnclex();
5179
5180 // Mark the stack as not iterable for the CPU profiler, which won't be
5181 // able to walk the stack without the return address.
5182 __ mov_b(__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
5183 Immediate(0));
5184
5185 // Remove the return address and the xmm registers.
5186 __ add(esp, Immediate(kXmmRegsSize + 1 * kSystemPointerSize));
5187
5188 // Compute a pointer to the unwinding limit in register ecx; that is
5189 // the first stack slot not part of the input frame.
5190 __ mov(ecx, Operand(esi, FrameDescription::frame_size_offset()));
5191 __ add(ecx, esp);
5192
5193 // Unwind the stack down to - but not including - the unwinding
5194 // limit and copy the contents of the activation frame to the input
5195 // frame description.
5196 __ lea(edx, Operand(esi, FrameDescription::frame_content_offset()));
5197 Label pop_loop_header;
5198 __ jmp(&pop_loop_header);
5199 Label pop_loop;
5200 __ bind(&pop_loop);
5201 __ pop(Operand(edx, 0));
5202 __ add(edx, Immediate(sizeof(uint32_t)));
5203 __ bind(&pop_loop_header);
5204 __ cmp(ecx, esp);
5205 __ j(not_equal, &pop_loop);
5206
5207 // Compute the output frame in the deoptimizer.
5208 __ push(eax);
5209 __ PrepareCallCFunction(1, esi);
5210 __ mov(Operand(esp, 0 * kSystemPointerSize), eax);
5211 {
5212 AllowExternalCallThatCantCauseGC scope(masm);
5213 __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
5214 }
5215 __ pop(eax);
5216
5217 __ mov(esp, Operand(eax, Deoptimizer::caller_frame_top_offset()));
5218
5219 // Replace the current (input) frame with the output frames.
5220 Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
5221 // Outer loop state: eax = current FrameDescription**, edx = one
5222 // past the last FrameDescription**.
5223 __ mov(edx, Operand(eax, Deoptimizer::output_count_offset()));
5224 __ mov(eax, Operand(eax, Deoptimizer::output_offset()));
5225 __ lea(edx, Operand(eax, edx, times_system_pointer_size, 0));
5226 __ jmp(&outer_loop_header);
5227 __ bind(&outer_push_loop);
5228 // Inner loop state: esi = current FrameDescription*, ecx = loop
5229 // index.
5230 __ mov(esi, Operand(eax, 0));
5231 __ mov(ecx, Operand(esi, FrameDescription::frame_size_offset()));
5232 __ jmp(&inner_loop_header);
5233 __ bind(&inner_push_loop);
5234 __ sub(ecx, Immediate(sizeof(uint32_t)));
5235 __ push(Operand(esi, ecx, times_1, FrameDescription::frame_content_offset()));
5236 __ bind(&inner_loop_header);
5237 __ test(ecx, ecx);
5238 __ j(not_zero, &inner_push_loop);
5239 __ add(eax, Immediate(kSystemPointerSize));
5240 __ bind(&outer_loop_header);
5241 __ cmp(eax, edx);
5242 __ j(below, &outer_push_loop);
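// Equivalent sketch of the two loops above (accessor names illustrative):
//
//   for (FrameDescription** fd = output; fd < output_end; ++fd) {
//     for (uint32_t off = (*fd)->frame_size(); off != 0;) {
//       off -= sizeof(uint32_t);
//       push(*reinterpret_cast<uint32_t*>(frame_content(*fd) + off));
//     }
//   }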
5243
5244 // In case of a failed STUB, we have to restore the XMM registers.
5245 for (int i = 0; i < config->num_allocatable_simd128_registers(); ++i) {
5246 int code = config->GetAllocatableSimd128Code(i);
5247 XMMRegister xmm_reg = XMMRegister::from_code(code);
5248 int src_offset = code * kSimd128Size + simd128_regs_offset;
5249 __ movdqu(xmm_reg, Operand(esi, src_offset));
5250 }
5251
5252 // Push pc and continuation from the last output frame.
5253 __ push(Operand(esi, FrameDescription::pc_offset()));
5254 __ mov(eax, Operand(esi, FrameDescription::continuation_offset()));
5255 // Skip pushing the continuation if it is zero. This is used as a marker for
5256 // wasm deopts that do not use a builtin call to finish the deopt.
5257 Label push_registers;
5258 __ test(eax, eax);
5259 __ j(zero, &push_registers);
5260 __ push(eax);
5261 __ bind(&push_registers);
5262
5263 // Push the registers from the last output frame.
5264 for (int i = 0; i < kNumberOfRegisters; i++) {
5265 int offset =
5266 (i * kSystemPointerSize) + FrameDescription::registers_offset();
5267 __ push(Operand(esi, offset));
5268 }
5269
5270 __ mov_b(__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
5271 Immediate(1));
5272
5273 // Restore the registers from the stack.
5274 __ popad();
5275
5276 __ InitializeRootRegister();
5277
5278 // Return to the continuation point.
5279 __ ret(0);
5280}
5281
5282} // namespace
5283
5284void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
5285 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
5286}
5287
5288void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
5289 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
5290}
5291
5292// If there is baseline code on the shared function info, converts an
5293// interpreter frame into a baseline frame and continues execution in baseline
5294// code. Otherwise execution continues with bytecode.
5295void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
5296 MacroAssembler* masm) {
5297 Label start;
5298 __ bind(&start);
5299
5300 // Spill the accumulator register; note that we're not within a frame, so we
5301 // have to make sure to pop it before doing any GC-visible calls.
5302 __ push(kInterpreterAccumulatorRegister);
5303
5304 // Get function from the frame.
5305 Register closure = eax;
5306 __ mov(closure, MemOperand(ebp, StandardFrameConstants::kFunctionOffset));
5307
5308 // Get the InstructionStream object from the shared function info.
5309 Register code_obj = esi;
5310 __ mov(code_obj,
5311 FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
5312 __ mov(
5313 code_obj,
5314 FieldOperand(code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset));
5315
5316 // For OSR entry it is safe to assume we always have baseline code.
5317 if (v8_flags.debug_code) {
5318 __ CmpObjectType(code_obj, CODE_TYPE, kInterpreterBytecodeOffsetRegister);
5319 __ Assert(equal, AbortReason::kExpectedBaselineData);
5320 AssertCodeIsBaseline(masm, code_obj, ecx);
5321 }
5322
5323 // Load the feedback cell and vector.
5324 Register feedback_cell = eax;
5325 Register feedback_vector = ecx;
5326 __ mov(feedback_cell, FieldOperand(closure, JSFunction::kFeedbackCellOffset));
5327 closure = no_reg;
5328 __ mov(feedback_vector,
5329 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
5330
5331 Label install_baseline_code;
5332 // Check if feedback vector is valid. If not, call prepare for baseline to
5333 // allocate it.
5334 __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE,
5335 kInterpreterBytecodeOffsetRegister);
5336 __ j(not_equal, &install_baseline_code);
5337
5338 // Save BytecodeOffset from the stack frame.
5339 __ mov(kInterpreterBytecodeOffsetRegister,
5340 MemOperand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
5341 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
5342 // Replace bytecode offset with feedback cell.
5343 static_assert(InterpreterFrameConstants::kBytecodeOffsetFromFp ==
5344 BaselineFrameConstants::kFeedbackCellFromFp);
5345 __ mov(MemOperand(ebp, BaselineFrameConstants::kFeedbackCellFromFp),
5346 feedback_cell);
5347 feedback_cell = no_reg;
5348 // Update feedback vector cache.
5349 static_assert(InterpreterFrameConstants::kFeedbackVectorFromFp ==
5350 BaselineFrameConstants::kFeedbackVectorFromFp);
5351 __ mov(MemOperand(ebp, InterpreterFrameConstants::kFeedbackVectorFromFp),
5352 feedback_vector);
5353 feedback_vector = no_reg;
5354
5355 // Compute baseline pc for bytecode offset.
5356 Register get_baseline_pc = ecx;
5357 __ LoadAddress(get_baseline_pc,
5358 ExternalReference::baseline_pc_for_next_executed_bytecode());
5359
5360 __ sub(kInterpreterBytecodeOffsetRegister,
5361 Immediate(OFFSET_OF_DATA_START(BytecodeArray) - kHeapObjectTag));
5362
5363 // Get bytecode array from the stack frame.
5364 __ mov(kInterpreterBytecodeArrayRegister,
5365 MemOperand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
5366 {
5367 FrameScope scope(masm, StackFrame::INTERNAL);
5368 __ PrepareCallCFunction(3, eax);
5369 __ mov(Operand(esp, 0 * kSystemPointerSize), code_obj);
5370 __ mov(Operand(esp, 1 * kSystemPointerSize),
5371 kInterpreterBytecodeOffsetRegister);
5372 __ mov(Operand(esp, 2 * kSystemPointerSize),
5373 kInterpreterBytecodeArrayRegister);
5374 __ CallCFunction(get_baseline_pc, 3);
5375 }
5376 __ LoadCodeInstructionStart(code_obj, code_obj);
5377 __ add(code_obj, kReturnRegister0);
5378 __ pop(kInterpreterAccumulatorRegister);
5379
5380 DCHECK_EQ(feedback_cell, no_reg);
5381 closure = ecx;
5382 __ mov(closure, MemOperand(ebp, StandardFrameConstants::kFunctionOffset));
5383 ResetJSFunctionAge(masm, closure, closure);
5384 Generate_OSREntry(masm, code_obj);
5385 __ Trap(); // Unreachable.
5386
5387 __ bind(&install_baseline_code);
5388 // Pop/re-push the accumulator so that it's spilled within the below frame
5389 // scope, to keep the stack valid.
5390 __ pop(kInterpreterAccumulatorRegister);
5391 // Restore the clobbered context register.
5392 __ mov(kContextRegister,
5393 MemOperand(ebp, StandardFrameConstants::kContextOffset));
5394 {
5395 FrameScope scope(masm, StackFrame::INTERNAL);
5396 __ Push(kInterpreterAccumulatorRegister);
5397 // Reload closure.
5398 closure = eax;
5399 __ mov(closure, MemOperand(ebp, StandardFrameConstants::kFunctionOffset));
5400 __ Push(closure);
5401 __ CallRuntime(Runtime::kInstallBaselineCode, 1);
5402 __ Pop(kInterpreterAccumulatorRegister);
5403 }
5404 // Retry from the start after installing baseline code.
5405 __ jmp(&start);
5406}
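
// Pseudo-code of the trampoline above (illustrative; field and helper names
// are descriptive, not V8 API):
//
//   start:
//     code = closure.shared_function_info.baseline_code;  // assumed present
//     frame[kBytecodeOffsetFromFp] = feedback_cell;       // slot is reused
//     frame[kFeedbackVectorFromFp] = feedback_vector;
//     pc = baseline_pc_for_next_executed_bytecode(code, bytecode_offset,
//                                                 bytecode_array);
//     goto code.instruction_start + pc;                   // enter baseline
//   install_baseline_code:
//     Runtime::kInstallBaselineCode(closure); goto start;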
5407
5408void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
5409 // Frame is being dropped:
5410 // - Look up current function on the frame.
5411 // - Leave the frame.
5412 // - Restart the frame by calling the function.
5413
5414 __ mov(edi, Operand(ebp, StandardFrameConstants::kFunctionOffset));
5415 __ mov(eax, Operand(ebp, StandardFrameConstants::kArgCOffset));
5416
5417 __ LeaveFrame(StackFrame::INTERPRETED);
5418
5419 // The arguments are already on the stack (including any necessary padding),
5420 // so we should not try to massage them again.
5421 __ mov(ecx, Immediate(kDontAdaptArgumentsSentinel));
5422 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
5423 __ InvokeFunctionCode(edi, no_reg, ecx, eax, InvokeType::kJump);
5424}
5425
5426#undef __
5427
5428} // namespace internal
5429} // namespace v8
5430
5431#endif // V8_TARGET_ARCH_IA32