v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
builtins-s390.cc
1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_S390X
6
12// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
15#include "src/debug/debug.h"
19#include "src/heap/heap-inl.h"
21#include "src/objects/cell.h"
22#include "src/objects/foreign.h"
25#include "src/objects/smi.h"
26#include "src/runtime/runtime.h"
27
28#if V8_ENABLE_WEBASSEMBLY
33#endif // V8_ENABLE_WEBASSEMBLY
34
35namespace v8 {
36namespace internal {
37
38#define __ ACCESS_MASM(masm)
39
40namespace {
41
42static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
43 Register scratch) {
44 DCHECK(!AreAliased(code, scratch));
45 // Verify that the code kind is baseline code via the CodeKind.
46 __ LoadU32(scratch, FieldMemOperand(code, Code::kFlagsOffset));
47 __ DecodeField<Code::KindField>(scratch);
48 __ CmpS64(scratch, Operand(static_cast<int>(CodeKind::BASELINE)));
49 __ Assert(eq, AbortReason::kExpectedBaselineData);
50}
51
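// Loads the SharedFunctionInfo's trusted function data into |bytecode| and
// dispatches on its instance type: jumps to |is_baseline| for baseline Code,
// to |is_unavailable| if neither bytecode nor interpreter data is present,
// and otherwise falls through with the BytecodeArray left in |bytecode|.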
52static void GetSharedFunctionInfoBytecodeOrBaseline(
53 MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
54 Label* is_baseline, Label* is_unavailable) {
56 ASM_CODE_COMMENT(masm);
57 Label done;
58
59 Register data = bytecode;
60 __ LoadTaggedField(
61 data,
62 FieldMemOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset));
63
64 __ LoadMap(scratch1, data);
65 __ LoadU16(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
66
67#ifndef V8_JITLESS
68 __ CmpS32(scratch1, Operand(CODE_TYPE));
69 if (v8_flags.debug_code) {
70 Label not_baseline;
71 __ b(ne, &not_baseline);
72 AssertCodeIsBaseline(masm, data, scratch1);
73 __ beq(is_baseline);
74 __ bind(&not_baseline);
75 } else {
76 __ beq(is_baseline);
77 }
78#endif // !V8_JITLESS
79
80 __ CmpS32(scratch1, Operand(BYTECODE_ARRAY_TYPE));
81 __ b(eq, &done);
82
83 __ CmpS32(scratch1, Operand(INTERPRETER_DATA_TYPE));
84 __ b(ne, is_unavailable);
85 __ LoadTaggedField(
86 data, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset));
87
88 __ bind(&done);
89}
90
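// Materializes the OSR entry address (entry_address + offset) in the link
// register r14 and returns, so that execution resumes at the OSR entry point.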
91void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
92 Operand offset) {
93 if (!offset.is_reg() && is_int20(offset.immediate())) {
94 __ lay(r14, MemOperand(entry_address, offset.immediate()));
95 } else {
96 DCHECK(offset.is_reg());
97 __ AddS64(r14, entry_address, offset.rm());
98 }
99
100 // "return" to the OSR entry point of the function.
101 __ Ret();
102}
103
104void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi,
105 Register scratch) {
106 DCHECK(!AreAliased(sfi, scratch));
107 __ mov(scratch, Operand(0));
108 __ StoreU16(scratch, FieldMemOperand(sfi, SharedFunctionInfo::kAgeOffset),
109 no_reg);
110}
111
112void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
113 Register scratch1, Register scratch2) {
114 __ LoadTaggedField(
115 scratch1,
116 FieldMemOperand(js_function, JSFunction::kSharedFunctionInfoOffset));
117 ResetSharedFunctionInfoAge(masm, scratch1, scratch2);
118}
119
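// Clears the OSR urgency bits in the feedback vector's osr_state byte while
// leaving the remaining state bits untouched.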
120void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
121 Register feedback_vector, Register scratch) {
122 DCHECK(!AreAliased(feedback_vector, scratch));
123 __ LoadU8(scratch,
124 FieldMemOperand(feedback_vector, FeedbackVector::kOsrStateOffset));
125 __ AndP(scratch, scratch, Operand(~FeedbackVector::OsrUrgencyBits::kMask));
126 __ StoreU8(scratch,
127 FieldMemOperand(feedback_vector, FeedbackVector::kOsrStateOffset));
128}
129
130} // namespace
131
132// If there is baseline code on the shared function info, converts an
133// interpreter frame into a baseline frame and continues execution in baseline
134// code. Otherwise execution continues with bytecode.
135void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
136 MacroAssembler* masm) {
137 Label start;
138 __ bind(&start);
139
140 // Get function from the frame.
141 Register closure = r3;
142 __ LoadU64(closure, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
143
144 // Get the InstructionStream object from the shared function info.
145 Register code_obj = r8;
146 __ LoadTaggedField(
147 code_obj,
148 FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
149
150 ResetSharedFunctionInfoAge(masm, code_obj, r5);
151
152 __ LoadTaggedField(
153 code_obj, FieldMemOperand(
154 code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset));
155
156 // For OSR entry it is safe to assume we always have baseline code.
157 if (v8_flags.debug_code) {
158 __ CompareObjectType(code_obj, r5, r5, CODE_TYPE);
159 __ Assert(eq, AbortReason::kExpectedBaselineData);
160 AssertCodeIsBaseline(masm, code_obj, r5);
161 }
162
163 // Load the feedback cell and vector.
164 Register feedback_cell = r4;
165 Register feedback_vector = r1;
166 __ LoadTaggedField(feedback_cell,
167 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
168 __ LoadTaggedField(
169 feedback_vector,
170 FieldMemOperand(feedback_cell, FeedbackCell::kValueOffset));
171
172 Label install_baseline_code;
173 // Check if feedback vector is valid. If not, call prepare for baseline to
174 // allocate it.
175 __ CompareObjectType(feedback_vector, r5, r5, FEEDBACK_VECTOR_TYPE);
176 __ b(ne, &install_baseline_code);
177
178 // Save BytecodeOffset from the stack frame.
179 __ LoadU64(kInterpreterBytecodeOffsetRegister,
180 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
181 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
182 // Replace bytecode offset with feedback cell.
183 static_assert(InterpreterFrameConstants::kBytecodeOffsetFromFp ==
184 BaselineFrameConstants::kFeedbackCellFromFp);
185 __ StoreU64(feedback_cell,
186 MemOperand(fp, BaselineFrameConstants::kFeedbackCellFromFp));
187 feedback_cell = no_reg;
188 // Update feedback vector cache.
189 static_assert(InterpreterFrameConstants::kFeedbackVectorFromFp ==
190 BaselineFrameConstants::kFeedbackVectorFromFp);
191 __ StoreU64(feedback_vector,
192 MemOperand(fp, InterpreterFrameConstants::kFeedbackVectorFromFp));
193 feedback_vector = no_reg;
194
195 // Compute baseline pc for bytecode offset.
196 Register get_baseline_pc = r5;
197 __ Move(get_baseline_pc,
198 ExternalReference::baseline_pc_for_next_executed_bytecode());
199
203
204 // Get bytecode array from the stack frame.
205 __ LoadU64(kInterpreterBytecodeArrayRegister,
206 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
207 // Save the accumulator register, since it's clobbered by the below call.
208 __ Push(kInterpreterAccumulatorRegister);
209 {
210 __ mov(kCArgRegs[0], code_obj);
211 __ mov(kCArgRegs[1], kInterpreterBytecodeOffsetRegister);
212 __ mov(kCArgRegs[2], kInterpreterBytecodeArrayRegister);
213 FrameScope scope(masm, StackFrame::INTERNAL);
214 __ PrepareCallCFunction(3, 0, r1);
215 __ CallCFunction(get_baseline_pc, 3, 0);
216 }
217 __ LoadCodeInstructionStart(code_obj, code_obj);
218 __ AddS64(code_obj, code_obj, kReturnRegister0);
219 __ Pop(kInterpreterAccumulatorRegister);
220
221 Generate_OSREntry(masm, code_obj, Operand(0));
222 __ Trap(); // Unreachable.
223
224 __ bind(&install_baseline_code);
225 {
226 FrameScope scope(masm, StackFrame::INTERNAL);
228 __ Push(closure);
229 __ CallRuntime(Runtime::kInstallBaselineCode, 1);
231 }
232 // Retry from the start after installing baseline code.
233 __ b(&start);
234}
235
236namespace {
237
238enum class OsrSourceTier {
239 kInterpreter,
240 kBaseline,
241};
242
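// Shared tail of the OSR builtins: if no target code was passed in, request
// it via Runtime::kCompileOptimizedOSR; optionally log/trace the OSR entry;
// then read the OSR entry offset from the code's deoptimization data and
// jump to code_entry + osr_offset.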
243void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
244 Register maybe_target_code,
245 Register expected_param_count) {
246 Label jump_to_optimized_code;
247 {
248 // If maybe_target_code is not null, no need to call into runtime. A
249 // precondition here is: if maybe_target_code is an InstructionStream
250 // object, it must NOT be marked_for_deoptimization (callers must ensure
251 // this).
252 __ CmpSmiLiteral(maybe_target_code, Smi::zero(), r0);
253 __ bne(&jump_to_optimized_code);
254 }
255
256 ASM_CODE_COMMENT(masm);
257 {
258 FrameScope scope(masm, StackFrame::INTERNAL);
259 __ CallRuntime(Runtime::kCompileOptimizedOSR);
260 }
261
262 // If the code object is null, just return to the caller.
263 __ CmpSmiLiteral(r2, Smi::zero(), r0);
264 __ bne(&jump_to_optimized_code);
265 __ Ret();
266
267 __ bind(&jump_to_optimized_code);
268 DCHECK_EQ(maybe_target_code, r2); // Already in the right spot.
269
270 // OSR entry tracing.
271 {
272 Label next;
273 __ Move(r3, ExternalReference::address_of_log_or_trace_osr());
274 __ LoadU8(r3, MemOperand(r3));
275 __ tmll(r3, Operand(0xFF)); // Mask to the LSB.
276 __ beq(&next);
277
278 {
279 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
280 __ Push(r2); // Preserve the code object.
281 __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
282 __ Pop(r2);
283 }
284
285 __ bind(&next);
286 }
287
288 if (source == OsrSourceTier::kInterpreter) {
289 // Drop the handler frame that is sitting on top of the actual
290 // JavaScript frame. This is the case when OSR is triggered from bytecode.
291 __ LeaveFrame(StackFrame::STUB);
292 }
293
294 // The sandbox would rely on testing expected_parameter_count here.
295 static_assert(!V8_ENABLE_SANDBOX_BOOL);
296
297 // Load deoptimization data from the code object.
298 // <deopt_data> = <code>[#deoptimization_data_offset]
299 __ LoadTaggedField(
300 r3,
301 FieldMemOperand(r2, Code::kDeoptimizationDataOrInterpreterDataOffset));
302
303 // Load the OSR entrypoint offset from the deoptimization data.
304 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
305 __ SmiUntagField(
308
309 __ LoadCodeInstructionStart(r2, r2);
310
311 // Compute the target address = code_entry + osr_offset
312 // <entry_addr> = <code_entry> + <osr_offset>
313 Generate_OSREntry(masm, r2, Operand(r3));
314}
315
316} // namespace
317
318void Builtins::Generate_Adaptor(MacroAssembler* masm,
319 int formal_parameter_count, Address address) {
321 __ TailCallBuiltin(
322 Builtins::AdaptorWithBuiltinExitFrame(formal_parameter_count));
323}
324
325namespace {
326
327enum class ArgumentsElementType {
328 kRaw, // Push arguments as they are.
329 kHandle // Dereference arguments before pushing.
330};
331
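// Pushes |argc| - kJSArgcReceiverSlots arguments from |array| onto the stack,
// from the last argument down to the first. For kHandle elements each slot
// holds a handle that is dereferenced before being pushed.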
332void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
333 Register scratch,
334 ArgumentsElementType element_type) {
335 DCHECK(!AreAliased(array, argc, scratch));
336 Register counter = scratch;
337 Register value = ip;
338 Label loop, entry;
339 __ SubS64(counter, argc, Operand(kJSArgcReceiverSlots));
340 __ b(&entry);
341 __ bind(&loop);
342 __ ShiftLeftU64(value, counter, Operand(kSystemPointerSizeLog2));
343 __ LoadU64(value, MemOperand(array, value));
344 if (element_type == ArgumentsElementType::kHandle) {
345 __ LoadU64(value, MemOperand(value));
346 }
347 __ push(value);
348 __ bind(&entry);
349 __ SubS64(counter, counter, Operand(1));
350 __ bge(&loop);
351}
352
353void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
354 // ----------- S t a t e -------------
355 // -- r2 : number of arguments
356 // -- r3 : constructor function
357 // -- r5 : new target
358 // -- cp : context
359 // -- lr : return address
360 // -- sp[...]: constructor arguments
361 // -----------------------------------
362
363 Register scratch = r4;
364 Label stack_overflow;
365
366 __ StackOverflowCheck(r2, scratch, &stack_overflow);
367
368 // Enter a construct frame.
369 {
370 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
371
372 // Preserve the incoming parameters on the stack.
373 __ Push(cp, r2);
374
375 // TODO(victorgomes): When the arguments adaptor is completely removed, we
376 // should get the formal parameter count and copy the arguments in its
377 // correct position (including any undefined), instead of delaying this to
378 // InvokeFunction.
379
380 // Set up pointer to first argument (skip receiver).
381 __ AddS64(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset +
382 kSystemPointerSize));
383 // Copy arguments and receiver to the expression stack.
384 // r6: Pointer to start of arguments.
385 // r2: Number of arguments.
386 Generate_PushArguments(masm, r6, r2, r1, ArgumentsElementType::kRaw);
387
388 // The receiver for the builtin/api call.
389 __ PushRoot(RootIndex::kTheHoleValue);
390
391 // Call the function.
392 // r2: number of arguments
393 // r3: constructor function
394 // r5: new target
395
396 __ InvokeFunctionWithNewTarget(r3, r5, r2, InvokeType::kCall);
397
398 // Restore context from the frame.
399 __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
400 // Restore arguments count from the frame.
401 __ LoadU64(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
402
403 // Leave construct frame.
404 }
405 // Remove caller arguments from the stack and return.
406 __ DropArguments(scratch);
407 __ Ret();
408
409 __ bind(&stack_overflow);
410 {
411 FrameScope scope(masm, StackFrame::INTERNAL);
412 __ CallRuntime(Runtime::kThrowStackOverflow);
413 __ bkpt(0); // Unreachable code.
414 }
415}
416
417} // namespace
418
419// The construct stub for ES5 constructor functions and ES6 class constructors.
420void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
421 // ----------- S t a t e -------------
422 // -- r2: number of arguments (untagged)
423 // -- r3: constructor function
424 // -- r5: new target
425 // -- cp: context
426 // -- lr: return address
427 // -- sp[...]: constructor arguments
428 // -----------------------------------
429
430 FrameScope scope(masm, StackFrame::MANUAL);
431 // Enter a construct frame.
432 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
433 __ EnterFrame(StackFrame::CONSTRUCT);
434
435 // Preserve the incoming parameters on the stack.
436 __ Push(cp, r2, r3);
437 __ PushRoot(RootIndex::kUndefinedValue);
438 __ Push(r5);
439
440 // ----------- S t a t e -------------
441 // -- sp[0*kSystemPointerSize]: new target
442 // -- sp[1*kSystemPointerSize]: padding
443 // -- r3 and sp[2*kSystemPointerSize]: constructor function
444 // -- sp[3*kSystemPointerSize]: number of arguments
445 // -- sp[4*kSystemPointerSize]: context
446 // -----------------------------------
447
448 __ LoadTaggedField(
449 r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
450 __ LoadU32(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
451 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r6);
452 __ JumpIfIsInRange(
453 r6, r6, static_cast<uint8_t>(FunctionKind::kDefaultDerivedConstructor),
454 static_cast<uint8_t>(FunctionKind::kDerivedConstructor),
455 &not_create_implicit_receiver);
456
457 // If not derived class constructor: Allocate the new receiver object.
458 __ CallBuiltin(Builtin::kFastNewObject);
459 __ b(&post_instantiation_deopt_entry);
460
461 // Else: use TheHoleValue as receiver for constructor call
462 __ bind(&not_create_implicit_receiver);
463 __ LoadRoot(r2, RootIndex::kTheHoleValue);
464
465 // ----------- S t a t e -------------
466 // -- r2: receiver
467 // -- Slot 4 / sp[0*kSystemPointerSize]: new target
468 // -- Slot 3 / sp[1*kSystemPointerSize]: padding
469 // -- Slot 2 / sp[2*kSystemPointerSize]: constructor function
470 // -- Slot 1 / sp[3*kSystemPointerSize]: number of arguments
471 // -- Slot 0 / sp[4*kSystemPointerSize]: context
472 // -----------------------------------
473 // Deoptimizer enters here.
474 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
475 masm->pc_offset());
476 __ bind(&post_instantiation_deopt_entry);
477
478 // Restore new target.
479 __ Pop(r5);
480
481 // Push the allocated receiver to the stack.
482 __ Push(r2);
483 // We need two copies because we may have to return the original one
484 // and the calling conventions dictate that the called function pops the
485 // receiver. The second copy is pushed after the arguments; it is kept in
486 // r8 here since r2 needs to hold the number of arguments before invoking
487 // the function.
488 __ mov(r8, r2);
489
490 // Set up pointer to first argument (skip receiver).
491 __ AddS64(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset +
492 kSystemPointerSize));
493
494 // ----------- S t a t e -------------
495 // -- r5: new target
496 // -- sp[0*kSystemPointerSize]: implicit receiver
497 // -- sp[1*kSystemPointerSize]: implicit receiver
498 // -- sp[2*kSystemPointerSize]: padding
499 // -- sp[3*kSystemPointerSize]: constructor function
500 // -- sp[4*kSystemPointerSize]: number of arguments
501 // -- sp[5*kSystemPointerSize]: context
502 // -----------------------------------
503
504 // Restore constructor function and argument count.
505 __ LoadU64(r3, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
506 __ LoadU64(r2, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
507
508 Label stack_overflow;
509 __ StackOverflowCheck(r2, r7, &stack_overflow);
510
511 // Copy arguments and receiver to the expression stack.
512 // r6: Pointer to start of argument.
513 // r2: Number of arguments.
514 Generate_PushArguments(masm, r6, r2, r1, ArgumentsElementType::kRaw);
515
516 // Push implicit receiver.
517 __ Push(r8);
518
519 // Call the function.
520 __ InvokeFunctionWithNewTarget(r3, r5, r2, InvokeType::kCall);
521
522 // If the result is an object (in the ECMA sense), we should get rid
523 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
524 // on page 74.
525 Label use_receiver, do_throw, leave_and_return, check_receiver;
526
527 // If the result is undefined, we jump out to using the implicit receiver.
528 __ JumpIfNotRoot(r2, RootIndex::kUndefinedValue, &check_receiver);
529
530 // Otherwise we do a smi check and fall through to check if the return value
531 // is a valid receiver.
532
533 // Throw away the result of the constructor invocation and use the
534 // on-stack receiver as the result.
535 __ bind(&use_receiver);
536 __ LoadU64(r2, MemOperand(sp));
537 __ JumpIfRoot(r2, RootIndex::kTheHoleValue, &do_throw);
538
539 __ bind(&leave_and_return);
540 // Restore arguments count from the frame.
541 __ LoadU64(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
542 // Leave construct frame.
543 __ LeaveFrame(StackFrame::CONSTRUCT);
544
545 // Remove caller arguments from the stack and return.
546 __ DropArguments(r3);
547 __ Ret();
548
549 __ bind(&check_receiver);
550 // If the result is a smi, it is *not* an object in the ECMA sense.
551 __ JumpIfSmi(r2, &use_receiver);
552
553 // If the type of the result (stored in its map) is less than
554 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
555 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
556 __ CompareObjectType(r2, r6, r6, FIRST_JS_RECEIVER_TYPE);
557 __ bge(&leave_and_return);
558 __ b(&use_receiver);
559
560 __ bind(&do_throw);
561 // Restore the context from the frame.
562 __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
563 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
564 __ bkpt(0);
565
566 __ bind(&stack_overflow);
567 // Restore the context from the frame.
568 __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
569 __ CallRuntime(Runtime::kThrowStackOverflow);
570 // Unreachable code.
571 __ bkpt(0);
572}
573
574void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
575 Generate_JSBuiltinsConstructStubHelper(masm);
576}
577
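// Resumes a suspended generator: stores the input value into the generator
// object, copies the generator's parameters-and-registers array onto the
// stack, and tail-calls the generator function with new.target set to the
// generator object.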
578// static
579void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
580 // ----------- S t a t e -------------
581 // -- r2 : the value to pass to the generator
582 // -- r3 : the JSGeneratorObject to resume
583 // -- lr : return address
584 // -----------------------------------
585 // Store input value into generator object.
586 __ StoreTaggedField(
587 r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset), r0);
588 __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
589 kLRHasNotBeenSaved, SaveFPRegsMode::kIgnore);
590 // Check that r3 is still valid, RecordWrite might have clobbered it.
591 __ AssertGeneratorObject(r3);
592
593 // Load suspended function and context.
594 __ LoadTaggedField(r6,
595 FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
596 __ LoadTaggedField(cp, FieldMemOperand(r6, JSFunction::kContextOffset));
597
598 // Flood function if we are stepping.
599 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
600 Label stepping_prepared;
601 Register scratch = r7;
602
603 ExternalReference debug_hook =
604 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
605 __ Move(scratch, debug_hook);
606 __ LoadS8(scratch, MemOperand(scratch));
607 __ CmpSmiLiteral(scratch, Smi::zero(), r0);
608 __ bne(&prepare_step_in_if_stepping);
609
610 // Flood function if we need to continue stepping in the suspended generator.
611
612 ExternalReference debug_suspended_generator =
613 ExternalReference::debug_suspended_generator_address(masm->isolate());
614
615 __ Move(scratch, debug_suspended_generator);
616 __ LoadU64(scratch, MemOperand(scratch));
617 __ CmpS64(scratch, r3);
618 __ beq(&prepare_step_in_suspended_generator);
619 __ bind(&stepping_prepared);
620
621 // Check the stack for overflow. We are not trying to catch interruptions
622 // (i.e. debug break and preemption) here, so check the "real stack limit".
623 Label stack_overflow;
624 __ LoadU64(scratch,
625 __ StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
626 __ CmpU64(sp, scratch);
627 __ blt(&stack_overflow);
628
629 // ----------- S t a t e -------------
630 // -- r3 : the JSGeneratorObject to resume
631 // -- r6 : generator function
632 // -- cp : generator context
633 // -- lr : return address
634 // -----------------------------------
635
636 // Copy the function arguments from the generator object's register file.
637 __ LoadTaggedField(
638 r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
639 __ LoadU16(
640 r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
641 __ SubS64(r5, r5, Operand(kJSArgcReceiverSlots));
642 __ LoadTaggedField(
643 r4,
644 FieldMemOperand(r3, JSGeneratorObject::kParametersAndRegistersOffset));
645 {
646 Label done_loop, loop;
647 __ bind(&loop);
648 __ SubS64(r5, r5, Operand(1));
649 __ blt(&done_loop);
650 __ ShiftLeftU64(r1, r5, Operand(kTaggedSizeLog2));
651 __ la(scratch, MemOperand(r4, r1));
652 __ LoadTaggedField(
653 scratch, FieldMemOperand(scratch, OFFSET_OF_DATA_START(FixedArray)));
654 __ Push(scratch);
655 __ b(&loop);
656 __ bind(&done_loop);
657
658 // Push receiver.
659 __ LoadTaggedField(scratch,
660 FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
661 __ Push(scratch);
662 }
663
664 // Underlying function needs to have bytecode available.
665 if (v8_flags.debug_code) {
666 Label is_baseline, is_unavailable, ok;
667 __ LoadTaggedField(
668 r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
669 GetSharedFunctionInfoBytecodeOrBaseline(masm, r5, r5, ip, &is_baseline,
670 &is_unavailable);
671 __ jmp(&ok);
672
673 __ bind(&is_unavailable);
674 __ Abort(AbortReason::kMissingBytecodeArray);
675
676 __ bind(&is_baseline);
677 __ CompareObjectType(r5, r5, r5, CODE_TYPE);
678 __ Assert(eq, AbortReason::kMissingBytecodeArray);
679
680 __ bind(&ok);
681 }
682
683 // Resume (Ignition/TurboFan) generator object.
684 {
685 __ LoadTaggedField(
686 r2, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
687 __ LoadS16(r2, FieldMemOperand(
688 r2, SharedFunctionInfo::kFormalParameterCountOffset));
689 // We abuse new.target both to indicate that this is a resume call and to
690 // pass in the generator object. In ordinary calls, new.target is always
691 // undefined because generator functions are non-constructable.
692 __ mov(r5, r3);
693 __ mov(r3, r6);
694 __ JumpJSFunction(r3);
695 }
696
697 __ bind(&prepare_step_in_if_stepping);
698 {
699 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
700 __ Push(r3, r6);
701 // Push hole as receiver since we do not use it for stepping.
702 __ PushRoot(RootIndex::kTheHoleValue);
703 __ CallRuntime(Runtime::kDebugOnFunctionCall);
704 __ Pop(r3);
705 __ LoadTaggedField(r6,
706 FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
707 }
708 __ b(&stepping_prepared);
709
710 __ bind(&prepare_step_in_suspended_generator);
711 {
712 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
713 __ Push(r3);
714 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
715 __ Pop(r3);
716 __ LoadTaggedField(r6,
717 FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
718 }
719 __ b(&stepping_prepared);
720
721 __ bind(&stack_overflow);
722 {
723 FrameScope scope(masm, StackFrame::INTERNAL);
724 __ CallRuntime(Runtime::kThrowStackOverflow);
725 __ bkpt(0); // This should be unreachable.
726 }
727}
728
729void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
730 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
731 __ push(r3);
732 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
733 __ Trap(); // Unreachable.
734}
735
736namespace {
737
738constexpr int kPushedStackSpace =
742
743// Called with the native C calling convention. The corresponding function
744// signature is either:
745//
746// using JSEntryFunction = GeneratedCode<Address(
747// Address root_register_value, Address new_target, Address target,
748// Address receiver, intptr_t argc, Address** args)>;
749// or
750// using JSEntryFunction = GeneratedCode<Address(
751// Address root_register_value, MicrotaskQueue* microtask_queue)>;
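// Saves the callee-saved FP and GP registers, builds an entry (or construct
// entry) frame with a faked try/catch handler, calls the given entry
// trampoline builtin, and restores the saved registers on return.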
752void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
753 Builtin entry_trampoline) {
754 // The register state is either:
755 // r2: root register value
756 // r3: code entry
757 // r4: function
758 // r5: receiver
759 // r6: argc
760 // [sp + 20 * kSystemPointerSize]: argv
761 // or
762 // r2: root_register_value
763 // r3: microtask_queue
764
765 Label invoke, handler_entry, exit;
766
767#if V8_OS_ZOS
768 const int stack_space = 12 * kSystemPointerSize;
769
770 // Store r4 - r15 to Stack
771 __ StoreMultipleP(r4, sp, MemOperand(r4, kStackPointerBias - stack_space));
772 // Grow stack
773 __ lay(r4, MemOperand(r4, -stack_space));
774
775 // Shuffle input XPLINK register arguments to match LoZ
776 __ mov(sp, r4);
777 __ mov(r4, r3);
778 __ mov(r3, r2);
779 __ mov(r2, r1);
780
781 // Load args 4 and 5 from XPLINK extra frame slots in r5 and r6
782 __ LoadMultipleP(
783 r5, r6,
784 MemOperand(sp, kStackPointerBias +
785 kXPLINKStackFrameExtraParamSlot * kSystemPointerSize +
786 stack_space));
787
788 // Load arg 6 from XPLINK extra arg slot
789 __ LoadU64(r0, MemOperand(sp, kStackPointerBias +
790 kXPLINKStackFrameExtraParamSlot *
791 kSystemPointerSize +
792 stack_space + 2 * kSystemPointerSize));
793
794 // Store arg 6 to expected LoZ save area
795 __ StoreU64(r0, MemOperand(sp, kCalleeRegisterSaveAreaSize));
796#endif
797
798 int pushed_stack_space = 0;
799 {
800 NoRootArrayScope no_root_array(masm);
801
802 // saving floating point registers
803 // 64bit ABI requires f8 to f15 be saved
804 // http://refspecs.linuxbase.org/ELF/zSeries/lzsabi0_zSeries.html
805 __ lay(sp, MemOperand(sp, -8 * kDoubleSize));
806 __ std(d8, MemOperand(sp));
807 __ std(d9, MemOperand(sp, 1 * kDoubleSize));
808 __ std(d10, MemOperand(sp, 2 * kDoubleSize));
809 __ std(d11, MemOperand(sp, 3 * kDoubleSize));
810 __ std(d12, MemOperand(sp, 4 * kDoubleSize));
811 __ std(d13, MemOperand(sp, 5 * kDoubleSize));
812 __ std(d14, MemOperand(sp, 6 * kDoubleSize));
813 __ std(d15, MemOperand(sp, 7 * kDoubleSize));
814 pushed_stack_space += kNumCalleeSavedDoubles * kDoubleSize;
815
816 // zLinux ABI
817 // Incoming parameters:
818 // r2: root register value
819 // r3: code entry
820 // r4: function
821 // r5: receiver
822 // r6: argc
823 // [sp + 20 * kSystemPointerSize]: argv
824 // Requires us to save the callee-preserved registers r6-r13
825 // General convention is to also save r14 (return addr) and
826 // sp/r15 as well in a single STM/STMG
827 __ lay(sp, MemOperand(sp, -10 * kSystemPointerSize));
828 __ StoreMultipleP(r6, sp, MemOperand(sp, 0));
829 pushed_stack_space += (kNumCalleeSaved + 2) * kSystemPointerSize;
830
831 // Initialize the root register.
832 // C calling convention. The first argument is passed in r2.
833 __ mov(kRootRegister, r2);
834 }
835
836 // Push a frame with special values setup to mark it as an entry frame.
837 // Bad FP (-1)
838 // SMI Marker
839 // SMI Marker
840 // kCEntryFPAddress
841 // Frame type
842 // Clear c_entry_fp, now we've pushed its previous value to the stack.
843 // If the c_entry_fp is not already zero and we don't clear it, the
844 // StackFrameIteratorForProfiler will assume we are executing C++ and miss the
845 // JS frames on top.
846 pushed_stack_space += 7 * kSystemPointerSize;
847
848 // Push a bad frame pointer to fail if it is used.
849 __ mov(r0, Operand(-1));
850 __ push(r0);
851
852 __ mov(r0, Operand(StackFrame::TypeToMarker(type)));
853 __ push(r0);
854 __ push(r0);
855
856 __ mov(r0, Operand::Zero());
857 __ Move(ip, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
858 masm->isolate()));
859 __ LoadU64(r9, MemOperand(ip));
860 __ StoreU64(r0, MemOperand(ip));
861 __ push(r9);
862
863 __ LoadIsolateField(ip, IsolateFieldId::kFastCCallCallerFP);
864 __ LoadU64(r9, MemOperand(ip));
865 __ StoreU64(r0, MemOperand(ip));
866 __ push(r9);
867
868 __ LoadIsolateField(ip, IsolateFieldId::kFastCCallCallerPC);
869 __ LoadU64(r9, MemOperand(ip));
870 __ StoreU64(r0, MemOperand(ip));
871 __ push(r9);
872
873#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
874 // Initialize the pointer cage base register.
875 __ LoadRootRelative(kPtrComprCageBaseRegister,
876 IsolateData::cage_base_offset());
877#endif
878
879 Register scrach = r8;
880
881 // Set up frame pointer for the frame to be pushed.
883 pushed_stack_space +=
885
886 // If this is the outermost JS call, set js_entry_sp value.
887 Label non_outermost_js;
888 ExternalReference js_entry_sp = ExternalReference::Create(
889 IsolateAddressId::kJSEntrySPAddress, masm->isolate());
890 __ Move(r7, js_entry_sp);
891 __ LoadAndTestP(scrach, MemOperand(r7));
892 __ bne(&non_outermost_js, Label::kNear);
893 __ StoreU64(fp, MemOperand(r7));
894 __ mov(scrach, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
895 Label cont;
896 __ b(&cont, Label::kNear);
897 __ bind(&non_outermost_js);
898 __ mov(scrach, Operand(StackFrame::INNER_JSENTRY_FRAME));
899
900 __ bind(&cont);
901 __ push(scrach); // frame-type
902
903 // Jump to a faked try block that does the invoke, with a faked catch
904 // block that sets the exception.
905 __ b(&invoke, Label::kNear);
906
907 __ bind(&handler_entry);
908
909 // Store the current pc as the handler offset. It's used later to create the
910 // handler table.
911 masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
912
913 // Caught exception: Store result (exception) in the exception
914 // field in the JSEnv and return a failure sentinel. Coming in here the
915 // fp will be invalid because the PushStackHandler below sets it to 0 to
916 // signal the existence of the JSEntry frame.
917 __ Move(scrach, ExternalReference::Create(IsolateAddressId::kExceptionAddress,
918 masm->isolate()));
919
920 __ StoreU64(r2, MemOperand(scrach));
921 __ LoadRoot(r2, RootIndex::kException);
922 __ b(&exit, Label::kNear);
923
924 // Invoke: Link this frame into the handler chain.
925 __ bind(&invoke);
926 // Must preserve r2-r6.
927 __ PushStackHandler();
928 // If an exception not caught by another handler occurs, this handler
929 // returns control to the code after the b(&invoke) above, which
930 // restores all kCalleeSaved registers (including cp and fp) to their
931 // saved values before returning a failure to C.
932
933 // Invoke the function by calling through JS entry trampoline builtin.
934 // Notice that we cannot store a reference to the trampoline code directly in
935 // this stub, because runtime stubs are not traversed when doing GC.
936
937 // Invoke the function by calling through JS entry trampoline builtin and
938 // pop the faked function when we return.
939 USE(pushed_stack_space);
940 DCHECK_EQ(kPushedStackSpace, pushed_stack_space);
941 __ CallBuiltin(entry_trampoline);
942
943 // Unlink this frame from the handler chain.
944 __ PopStackHandler();
945 __ bind(&exit); // r2 holds result
946
947 // Check if the current stack frame is marked as the outermost JS frame.
948 Label non_outermost_js_2;
949 __ pop(r7);
950 __ CmpS64(r7, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
951 __ bne(&non_outermost_js_2, Label::kNear);
952 __ mov(scrach, Operand::Zero());
953 __ Move(r7, js_entry_sp);
954 __ StoreU64(scrach, MemOperand(r7));
955 __ bind(&non_outermost_js_2);
956
957 // Restore the top frame descriptors from the stack.
958 __ pop(r5);
959 __ LoadIsolateField(scrach, IsolateFieldId::kFastCCallCallerPC);
960 __ StoreU64(r5, MemOperand(scrach));
961
962 __ pop(r5);
963 __ LoadIsolateField(scrach, IsolateFieldId::kFastCCallCallerFP);
964 __ StoreU64(r5, MemOperand(scrach));
965
966 __ pop(r5);
967 __ Move(scrach, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
968 masm->isolate()));
969 __ StoreU64(r5, MemOperand(scrach));
970
971 // Reset the stack to the callee saved registers.
973
974 // Reload callee-saved preserved regs, return address reg (r14) and sp
975 __ LoadMultipleP(r6, sp, MemOperand(sp, 0));
976 __ la(sp, MemOperand(sp, 10 * kSystemPointerSize));
977
978 // 64bit ABI requires f8 to f15 be saved
979 __ ld(d8, MemOperand(sp));
980 __ ld(d9, MemOperand(sp, 1 * kDoubleSize));
981 __ ld(d10, MemOperand(sp, 2 * kDoubleSize));
982 __ ld(d11, MemOperand(sp, 3 * kDoubleSize));
983 __ ld(d12, MemOperand(sp, 4 * kDoubleSize));
984 __ ld(d13, MemOperand(sp, 5 * kDoubleSize));
985 __ ld(d14, MemOperand(sp, 6 * kDoubleSize));
986 __ ld(d15, MemOperand(sp, 7 * kDoubleSize));
987 __ la(sp, MemOperand(sp, 8 * kDoubleSize));
988
989#if V8_OS_ZOS
990 // On z/OS, the return register is r3
991 __ mov(r3, r2);
992 // Restore r4 - r15 from Stack
993 __ LoadMultipleP(r4, sp, MemOperand(sp, kStackPointerBias));
994 __ b(r7);
995#else
996 __ b(r14);
997#endif
998}
999
1000} // namespace
1001
1002void Builtins::Generate_JSEntry(MacroAssembler* masm) {
1003 Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
1004}
1005
1006void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
1007 Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
1008 Builtin::kJSConstructEntryTrampoline);
1009}
1010
1011void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
1012 Generate_JSEntryVariant(masm, StackFrame::ENTRY,
1013 Builtin::kRunMicrotasksTrampoline);
1014}
1015
1016static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
1017 bool is_construct) {
1018 // Called from Generate_JS_Entry
1019 // r3: new.target
1020 // r4: function
1021 // r5: receiver
1022 // r6: argc
1023 // [fp + kPushedStackSpace + 20 * kSystemPointerSize]: argv
1024 // r0,r2,r7-r8, cp may be clobbered
1025
1026 __ mov(r2, r6);
1027 // Load argv from the stack.
1028 __ LoadU64(
1029 r6, MemOperand(fp, kPushedStackSpace + EntryFrameConstants::kArgvOffset));
1030
1031 // r2: argc
1032 // r3: new.target
1033 // r4: function
1034 // r5: receiver
1035 // r6: argv
1036
1037 // Enter an internal frame.
1038 {
1039 // FrameScope ends up calling MacroAssembler::EnterFrame here
1040 FrameScope scope(masm, StackFrame::INTERNAL);
1041
1042 // Setup the context (we need to use the caller context from the isolate).
1043 ExternalReference context_address = ExternalReference::Create(
1044 IsolateAddressId::kContextAddress, masm->isolate());
1045 __ Move(cp, context_address);
1046 __ LoadU64(cp, MemOperand(cp));
1047
1048 // Push the function
1049 __ Push(r4);
1050
1051 // Check if we have enough stack space to push all arguments.
1052 Label enough_stack_space, stack_overflow;
1053 __ mov(r7, r2);
1054 __ StackOverflowCheck(r7, r1, &stack_overflow);
1055 __ b(&enough_stack_space);
1056 __ bind(&stack_overflow);
1057 __ CallRuntime(Runtime::kThrowStackOverflow);
1058 // Unreachable code.
1059 __ bkpt(0);
1060
1061 __ bind(&enough_stack_space);
1062
1063 // Copy arguments to the stack from argv to sp.
1064 // The arguments are actually placed in reverse order on sp
1065 // compared to argv (i.e. arg1 ends up at the highest stack address).
1066 // r2: argc
1067 // r3: function
1068 // r5: new.target
1069 // r6: argv, i.e. points to first arg
1070 // r7: scratch reg to hold scaled argc
1071 // r8: scratch reg to hold arg handle
1072 Generate_PushArguments(masm, r6, r2, r1, ArgumentsElementType::kHandle);
1073
1074 // Push the receiver.
1075 __ Push(r5);
1076
1077 // Setup new.target, argc and function.
1078 __ mov(r5, r3);
1079 __ mov(r3, r4);
1080 // r2: argc
1081 // r3: function
1082 // r5: new.target
1083
1084 // Initialize all JavaScript callee-saved registers, since they will be seen
1085 // by the garbage collector as part of handlers.
1086 __ LoadRoot(r4, RootIndex::kUndefinedValue);
1087 __ mov(r6, r4);
1088 __ mov(r7, r6);
1089 __ mov(r8, r6);
1090
1091 // Invoke the code.
1092 Builtin builtin = is_construct ? Builtin::kConstruct : Builtins::Call();
1093 __ CallBuiltin(builtin);
1094
1095 // Exit the JS frame and remove the parameters (except function), and
1096 // return.
1097 }
1098 __ b(r14);
1099
1100 // r2: result
1101}
1102
1103void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1105}
1106
1107void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1109}
1110
1111void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
1112 // This expects two C++ function parameters passed by Invoke() in
1113 // execution.cc.
1114 // r2: root_register_value
1115 // r3: microtask_queue
1116
1117 __ mov(RunMicrotasksDescriptor::MicrotaskQueueRegister(), r3);
1118 __ TailCallBuiltin(Builtin::kRunMicrotasks);
1119}
1120
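// Drops the interpreter frame and removes the caller's arguments, using the
// larger of the formal parameter count and the actual argument count.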
1121static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
1122 Register scratch2) {
1123 Register params_size = scratch1;
1124 // Get the size of the formal parameters + receiver (in bytes).
1125 __ LoadU64(params_size,
1126 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1127 __ LoadU16(params_size,
1128 FieldMemOperand(params_size, BytecodeArray::kParameterSizeOffset));
1129
1130 Register actual_params_size = scratch2;
1131 // Compute the size of the actual parameters + receiver (in bytes).
1132 __ LoadU64(actual_params_size,
1133 MemOperand(fp, StandardFrameConstants::kArgCOffset));
1134
1135 // If actual is bigger than formal, then we should use it to free up the stack
1136 // arguments.
1137 Label corrected_args_count;
1138 __ CmpS64(params_size, actual_params_size);
1139 __ bge(&corrected_args_count);
1140 __ mov(params_size, actual_params_size);
1141 __ bind(&corrected_args_count);
1142
1143 // Leave the frame (also dropping the register file).
1144 __ LeaveFrame(StackFrame::INTERPRETED);
1145
1146 __ DropArguments(params_size);
1147}
1148
1149// Advance the current bytecode offset. This simulates what all bytecode
1150// handlers do upon completion of the underlying operation. Will bail out to a
1151// label if the bytecode (without prefix) is a return bytecode. Will not advance
1152// the bytecode offset if the current bytecode is a JumpLoop, instead just
1153// re-executing the JumpLoop to jump to the correct bytecode.
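// For example, for a Wide prefix the offset is first bumped past the prefix
// byte and the operand-scaled size table is selected; the size of the
// (unprefixed) bytecode is then added. For a JumpLoop the original offset is
// restored instead, so the loop bytecode itself is re-executed.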
1154static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
1155 Register bytecode_array,
1156 Register bytecode_offset,
1157 Register bytecode, Register scratch1,
1158 Register scratch2, Label* if_return) {
1159 Register bytecode_size_table = scratch1;
1160 Register scratch3 = bytecode;
1161
1162 // The bytecode offset value will be increased by one in wide and extra wide
1163 // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
1164 // will restore the original bytecode. In order to simplify the code, we have
1165 // a backup of it.
1166 Register original_bytecode_offset = scratch2;
1167 DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
1168 bytecode, original_bytecode_offset));
1169 __ Move(bytecode_size_table,
1170 ExternalReference::bytecode_size_table_address());
1171 __ Move(original_bytecode_offset, bytecode_offset);
1172
1173 // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
1174 Label process_bytecode, extra_wide;
1175 static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
1176 static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
1177 static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
1178 static_assert(3 ==
1179 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
1180 __ CmpS64(bytecode, Operand(0x3));
1181 __ bgt(&process_bytecode);
1182 __ tmll(bytecode, Operand(0x1));
1183 __ bne(&extra_wide);
1184
1185 // Load the next bytecode and update table to the wide scaled table.
1186 __ AddS64(bytecode_offset, bytecode_offset, Operand(1));
1187 __ LoadU8(bytecode, MemOperand(bytecode_array, bytecode_offset));
1188 __ AddS64(bytecode_size_table, bytecode_size_table,
1189 Operand(kByteSize * interpreter::Bytecodes::kBytecodeCount));
1190 __ b(&process_bytecode);
1191
1192 __ bind(&extra_wide);
1193 // Load the next bytecode and update table to the extra wide scaled table.
1194 __ AddS64(bytecode_offset, bytecode_offset, Operand(1));
1195 __ LoadU8(bytecode, MemOperand(bytecode_array, bytecode_offset));
1196 __ AddS64(bytecode_size_table, bytecode_size_table,
1197 Operand(2 * kByteSize * interpreter::Bytecodes::kBytecodeCount));
1198
1199 // Load the size of the current bytecode.
1200 __ bind(&process_bytecode);
1201
1202 // Bailout to the return label if this is a return bytecode.
1203#define JUMP_IF_EQUAL(NAME) \
1204 __ CmpS64(bytecode, \
1205 Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
1206 __ beq(if_return);
1207 RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
1208#undef JUMP_IF_EQUAL
1209
1210 // If this is a JumpLoop, re-execute it to perform the jump to the beginning
1211 // of the loop.
1212 Label end, not_jump_loop;
1213 __ CmpS64(bytecode,
1214 Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
1215 __ bne(&not_jump_loop);
1216 // We need to restore the original bytecode_offset since we might have
1217 // increased it to skip the wide / extra-wide prefix bytecode.
1218 __ Move(bytecode_offset, original_bytecode_offset);
1219 __ b(&end);
1220
1221 __ bind(&not_jump_loop);
1222 // Otherwise, load the size of the current bytecode and advance the offset.
1223 __ LoadU8(scratch3, MemOperand(bytecode_size_table, bytecode));
1224 __ AddS64(bytecode_offset, bytecode_offset, scratch3);
1225
1226 __ bind(&end);
1227}
1228
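// Out-of-line prologue for baseline code: checks the feedback vector's
// tiering flags (when leaptiering is disabled), resets OSR urgency, bumps the
// invocation count, pushes context, closure, argument count, bytecode array,
// feedback cell and feedback vector, and performs the stack/interrupt check.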
1229// static
1230void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
1231 // UseScratchRegisterScope temps(masm);
1232 // Need a few extra registers
1233 // temps.Include(r8, r9);
1234
1235 auto descriptor =
1236 Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
1237 Register closure = descriptor.GetRegisterParameter(
1238 BaselineOutOfLinePrologueDescriptor::kClosure);
1239 // Load the feedback cell and vector from the closure.
1240 Register feedback_cell = r6;
1241 Register feedback_vector = ip;
1242 __ LoadTaggedField(feedback_cell,
1243 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
1244 __ LoadTaggedField(
1245 feedback_vector,
1246 FieldMemOperand(feedback_cell, FeedbackCell::kValueOffset));
1247 __ AssertFeedbackVector(feedback_vector, r1);
1248
1249#ifndef V8_ENABLE_LEAPTIERING
1250 // Check for a tiering state.
1251 Label flags_need_processing;
1252 Register flags = r8;
1253 {
1254 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1255 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1256 }
1257#endif // !V8_ENABLE_LEAPTIERING
1258
1259 {
1260 UseScratchRegisterScope temps(masm);
1261 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, r1);
1262 }
1263
1264 // Increment invocation count for the function.
1265 {
1266 Register invocation_count = r1;
1267 __ LoadU32(invocation_count,
1268 FieldMemOperand(feedback_vector,
1269 FeedbackVector::kInvocationCountOffset));
1270 __ AddU32(invocation_count, Operand(1));
1271 __ StoreU32(invocation_count,
1272 FieldMemOperand(feedback_vector,
1273 FeedbackVector::kInvocationCountOffset));
1274 }
1275
1276 FrameScope frame_scope(masm, StackFrame::MANUAL);
1277 {
1278 ASM_CODE_COMMENT_STRING(masm, "Frame Setup");
1279 // Normally the first thing we'd do here is Push(lr, fp), but we already
1280 // entered the frame in BaselineCompiler::Prologue, as we had to use the
1281 // value lr before the call to this BaselineOutOfLinePrologue builtin.
1282
1283 Register callee_context = descriptor.GetRegisterParameter(
1284 BaselineOutOfLinePrologueDescriptor::kCalleeContext);
1285 Register callee_js_function = descriptor.GetRegisterParameter(
1286 BaselineOutOfLinePrologueDescriptor::kClosure);
1287 ResetJSFunctionAge(masm, callee_js_function, r1, r0);
1288 __ Push(callee_context, callee_js_function);
1289 DCHECK_EQ(callee_js_function, kJavaScriptCallTargetRegister);
1290 DCHECK_EQ(callee_js_function, kJSFunctionRegister);
1291
1292 Register argc = descriptor.GetRegisterParameter(
1293 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
1294 // We'll use the bytecode for both code age/OSR resetting, and pushing onto
1295 // the frame, so load it into a register.
1296 Register bytecodeArray = descriptor.GetRegisterParameter(
1297 BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
1298
1299 __ Push(argc, bytecodeArray);
1300
1301 if (v8_flags.debug_code) {
1302 Register scratch = r1;
1303 __ CompareObjectType(feedback_vector, scratch, scratch,
1304 FEEDBACK_VECTOR_TYPE);
1305 __ Assert(eq, AbortReason::kExpectedFeedbackVector);
1306 }
1307 __ Push(feedback_cell);
1308 __ Push(feedback_vector);
1309 }
1310
1311 Label call_stack_guard;
1312 Register frame_size = descriptor.GetRegisterParameter(
1313 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
1314 {
1315 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt check");
1316 // Stack check. This folds the checks for both the interrupt stack limit
1317 // check and the real stack limit into one by just checking for the
1318 // interrupt limit. The interrupt limit is either equal to the real stack
1319 // limit or tighter. By ensuring we have space until that limit after
1320 // building the frame we can quickly precheck both at once.
1321
1322 Register sp_minus_frame_size = r1;
1323 Register interrupt_limit = r0;
1324 __ SubS64(sp_minus_frame_size, sp, frame_size);
1325 __ LoadStackLimit(interrupt_limit, StackLimitKind::kInterruptStackLimit);
1326 __ CmpU64(sp_minus_frame_size, interrupt_limit);
1327 __ blt(&call_stack_guard);
1328 }
1329
1330 // Do "fast" return to the caller pc in lr.
1331 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1332 __ Ret();
1333
1334#ifndef V8_ENABLE_LEAPTIERING
1335 __ bind(&flags_need_processing);
1336 {
1337 ASM_CODE_COMMENT_STRING(masm, "Optimized marker check");
1338
1339 // Drop the frame created by the baseline call.
1340 __ Pop(r14, fp);
1341 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
1342 __ Trap();
1343 }
1344#endif // !V8_ENABLE_LEAPTIERING
1345
1346 __ bind(&call_stack_guard);
1347 {
1348 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt call");
1349 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1350 // Save incoming new target or generator
1351 __ Push(kJavaScriptCallNewTargetRegister);
1352 __ SmiTag(frame_size);
1353 __ Push(frame_size);
1354 __ CallRuntime(Runtime::kStackGuardWithGap);
1355 __ Pop(kJavaScriptCallNewTargetRegister);
1356 }
1357
1358 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1359 __ Ret();
1360}
1361
1362// static
1363void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
1364 // We're here because we got deopted during BaselineOutOfLinePrologue's stack
1365 // check. Undo all its frame creation and call into the interpreter instead.
1366
1367 // Drop the feedback vector, the bytecode offset (was the feedback vector but
1368 // got replaced during deopt) and bytecode array.
1369 __ Drop(3);
1370
1371 // Context, closure, argc.
1372 __ Pop(kContextRegister, kJavaScriptCallTargetRegister,
1373 kJavaScriptCallArgCountRegister);
1374
1375 // Drop frame pointer
1376 __ LeaveFrame(StackFrame::BASELINE);
1377
1378 // Enter the interpreter.
1379 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
1380}
1381
1382// Generate code for entering a JS function with the interpreter.
1383// On entry to the function the receiver and arguments have been pushed on the
1384// stack left to right.
1385//
1386// The live registers are:
1387// o r2: actual argument count
1388// o r3: the JS function object being called.
1389// o r5: the incoming new target or generator object
1390// o cp: our context
1391// o pp: the caller's constant pool pointer (if enabled)
1392// o fp: the caller's frame pointer
1393// o sp: stack pointer
1394// o lr: return address
1395//
1396// The function builds an interpreter frame. See InterpreterFrameConstants in
1397// frame-constants.h for its layout.
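// In kDefault mode the pc right after the bytecode dispatch call is recorded
// as the interpreter entry return pc; in kForProfiling mode it is only
// checked against the recorded value so both variants stay interchangeable.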
1398void Builtins::Generate_InterpreterEntryTrampoline(
1399 MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
1400 Register closure = r3;
1401
1402 // Get the bytecode array from the function object and load it into
1403 // kInterpreterBytecodeArrayRegister.
1404 __ LoadTaggedField(
1405 r6, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1406 ResetSharedFunctionInfoAge(masm, r6, ip);
1407
1408 // The bytecode array could have been flushed from the shared function info,
1409 // if so, call into CompileLazy.
1410 Label is_baseline, compile_lazy;
1411 GetSharedFunctionInfoBytecodeOrBaseline(masm, r6,
1412 kInterpreterBytecodeArrayRegister, ip,
1413 &is_baseline, &compile_lazy);
1414
1415 Label push_stack_frame;
1416 Register feedback_vector = r4;
1417 __ LoadFeedbackVector(feedback_vector, closure, r6, &push_stack_frame);
1418
1419#ifndef V8_JITLESS
1420#ifndef V8_ENABLE_LEAPTIERING
1421 // If feedback vector is valid, check for optimized code and update invocation
1422 // count.
1423
1424 Register flags = r6;
1425 Label flags_need_processing;
1426 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1427 flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
1428 &flags_need_processing);
1429#endif // !V8_ENABLE_LEAPTIERING
1430
1431 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, r1);
1432
1433 // Increment invocation count for the function.
1434 __ LoadS32(r1, FieldMemOperand(feedback_vector,
1435 FeedbackVector::kInvocationCountOffset));
1436 __ AddS64(r1, r1, Operand(1));
1437 __ StoreU32(r1, FieldMemOperand(feedback_vector,
1438 FeedbackVector::kInvocationCountOffset));
1439
1440 // Open a frame scope to indicate that there is a frame on the stack. The
1441 // MANUAL indicates that the scope shouldn't actually generate code to set up
1442 // the frame (that is done below).
1443
1444#else
1445 // Note: By omitting the above code in jitless mode we also disable:
1446 // - kFlagsLogNextExecution: only used for logging/profiling; and
1447 // - kInvocationCountOffset: only used for tiering heuristics and code
1448 // coverage.
1449#endif // !V8_JITLESS
1450
1451 __ bind(&push_stack_frame);
1452 FrameScope frame_scope(masm, StackFrame::MANUAL);
1453 __ PushStandardFrame(closure);
1454
1455 // Load the initial bytecode offset.
1456 __ mov(kInterpreterBytecodeOffsetRegister,
1457 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1458
1459 // Push bytecode array and Smi tagged bytecode array offset.
1460 __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
1461 __ Push(kInterpreterBytecodeArrayRegister, r0, feedback_vector);
1462
1463 // Allocate the local and temporary register file on the stack.
1464 Label stack_overflow;
1465 {
1466 // Load frame size (word) from the BytecodeArray object.
1467 __ LoadU32(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1468 BytecodeArray::kFrameSizeOffset));
1469
1470 // Do a stack check to ensure we don't go over the limit.
1471 __ SubS64(r8, sp, r4);
1472 __ CmpU64(r8, __ StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
1473 __ blt(&stack_overflow);
1474
1475 // If ok, push undefined as the initial value for all register file entries.
1476 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1477 Label loop, no_args;
1478 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1479 __ ShiftRightU64(r4, r4, Operand(kSystemPointerSizeLog2));
1480 __ LoadAndTestP(r4, r4);
1481 __ beq(&no_args);
1482 __ mov(r1, r4);
1483 __ bind(&loop);
1484 __ push(kInterpreterAccumulatorRegister);
1485 __ SubS64(r1, Operand(1));
1486 __ bne(&loop);
1487 __ bind(&no_args);
1488 }
1489
1490 // If the bytecode array has a valid incoming new target or generator object
1491 // register, initialize it with incoming value which was passed in r5.
1492 Label no_incoming_new_target_or_generator_register;
1493 __ LoadS32(r8,
1494 FieldMemOperand(
1495 kInterpreterBytecodeArrayRegister,
1496 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
1497 __ CmpS64(r8, Operand::Zero());
1498 __ beq(&no_incoming_new_target_or_generator_register);
1499 __ ShiftLeftU64(r8, r8, Operand(kSystemPointerSizeLog2));
1500 __ StoreU64(r5, MemOperand(fp, r8));
1501 __ bind(&no_incoming_new_target_or_generator_register);
1502
1503 // Perform interrupt stack check.
1504 // TODO(solanes): Merge with the real stack limit check above.
1505 Label stack_check_interrupt, after_stack_check_interrupt;
1506 __ LoadU64(r0,
1507 __ StackLimitAsMemOperand(StackLimitKind::kInterruptStackLimit));
1508 __ CmpU64(sp, r0);
1509 __ blt(&stack_check_interrupt);
1510 __ bind(&after_stack_check_interrupt);
1511
1512 // The accumulator is already loaded with undefined.
1513
1514 // Load the dispatch table into a register and dispatch to the bytecode
1515 // handler at the current bytecode offset.
1516 Label do_dispatch;
1517 __ bind(&do_dispatch);
1518 __ Move(
1519 kInterpreterDispatchTableRegister,
1520 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1521
1522 __ LoadU8(r5, MemOperand(kInterpreterBytecodeArrayRegister,
1523 kInterpreterBytecodeOffsetRegister));
1524 __ ShiftLeftU64(r5, r5, Operand(kSystemPointerSizeLog2));
1525 __ LoadU64(kJavaScriptCallCodeStartRegister,
1526 MemOperand(kInterpreterDispatchTableRegister, r5));
1527 __ Call(kJavaScriptCallCodeStartRegister);
1528
1529 __ RecordComment("--- InterpreterEntryReturnPC point ---");
1530 if (mode == InterpreterEntryTrampolineMode::kDefault) {
1531 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
1532 masm->pc_offset());
1533 } else {
1534 DCHECK_EQ(mode, InterpreterEntryTrampolineMode::kForProfiling);
1535 // Both versions must be the same up to this point otherwise the builtins
1536 // will not be interchangeable.
1537 CHECK_EQ(
1538 masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
1539 masm->pc_offset());
1540 }
1541
1542 // Any returns to the entry trampoline are either due to the return bytecode
1543 // or the interpreter tail calling a builtin and then a dispatch.
1544
1545 // Get bytecode array and bytecode offset from the stack frame.
1546 __ LoadU64(kInterpreterBytecodeArrayRegister,
1547 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1548 __ LoadU64(kInterpreterBytecodeOffsetRegister,
1549 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1550 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1551
1552 // Either return, or advance to the next bytecode and dispatch.
1553 Label do_return;
1558 &do_return);
1559 __ b(&do_dispatch);
1560
1561 __ bind(&do_return);
1562 // The return value is in r2.
1563 LeaveInterpreterFrame(masm, r4, r6);
1564 __ Ret();
1565
1566 __ bind(&stack_check_interrupt);
1567 // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
1568 // for the call to the StackGuard.
1574 __ CallRuntime(Runtime::kStackGuard);
1575
1576 // After the call, restore the bytecode array, bytecode offset and accumulator
1577 // registers again. Also, restore the bytecode offset in the stack to its
1578 // previous value.
1583 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1584
1586 __ StoreU64(r0,
1588
1589 __ jmp(&after_stack_check_interrupt);
1590
1591#ifndef V8_JITLESS
1592#ifndef V8_ENABLE_LEAPTIERING
1593 __ bind(&flags_need_processing);
1594 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
1595#endif // !V8_ENABLE_LEAPTIERING
1596
1597 __ bind(&is_baseline);
1598 {
1599#ifndef V8_ENABLE_LEAPTIERING
1600 // Load the feedback vector from the closure.
1601 __ LoadTaggedField(
1602 feedback_vector,
1603 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
1604 __ LoadTaggedField(
1605 feedback_vector,
1606 FieldMemOperand(feedback_vector, FeedbackCell::kValueOffset));
1607
1608 Label install_baseline_code;
1609 // Check if feedback vector is valid. If not, call prepare for baseline to
1610 // allocate it.
1611 __ LoadTaggedField(
1612 ip, FieldMemOperand(feedback_vector, HeapObject::kMapOffset));
1613 __ LoadU16(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
1614 __ CmpS32(ip, Operand(FEEDBACK_VECTOR_TYPE));
1615 __ b(ne, &install_baseline_code);
1616
1617 // Check for a tiering state.
1618 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1619 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1620
1621 // Load the baseline code into the closure.
1623 static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
1624 __ ReplaceClosureCodeWithOptimizedCode(r4, closure, ip, r1);
1625 __ JumpCodeObject(r4);
1626
1627 __ bind(&install_baseline_code);
1628#endif // !V8_ENABLE_LEAPTIERING
1629 __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
1630 }
1631#endif // !V8_JITLESS
1632
1633 __ bind(&compile_lazy);
1634 __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
1635
1636 __ bind(&stack_overflow);
1637 __ CallRuntime(Runtime::kThrowStackOverflow);
1638 __ bkpt(0); // Should not return.
1639}
1640
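// Computes the address of the last argument from |start_address| and
// |num_args|, then pushes all |num_args| arguments onto the stack with
// PushArray.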
1641static void GenerateInterpreterPushArgs(MacroAssembler* masm, Register num_args,
1642 Register start_address,
1643 Register scratch) {
1644 ASM_CODE_COMMENT(masm);
1645 __ SubS64(scratch, num_args, Operand(1));
1646 __ ShiftLeftU64(scratch, scratch, Operand(kSystemPointerSizeLog2));
1647 __ SubS64(start_address, start_address, scratch);
1648 // Push the arguments.
1649 __ PushArray(start_address, num_args, r1, scratch,
1651}
1652
1653// static
1654void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1655 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1656 InterpreterPushArgsMode mode) {
1657 DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1658 // ----------- S t a t e -------------
1659 // -- r2 : the number of arguments
1660 // -- r4 : the address of the first argument to be pushed. Subsequent
1661 // arguments should be consecutive above this, in the same order as
1662 // they are to be pushed onto the stack.
1663 // -- r3 : the target to call (can be any Object).
1664 // -----------------------------------
1665 Label stack_overflow;
1666 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1667 // The spread argument should not be pushed.
1668 __ SubS64(r2, r2, Operand(1));
1669 }
1670
1671 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1672 __ SubS64(r5, r2, Operand(kJSArgcReceiverSlots));
1673 } else {
1674 __ mov(r5, r2);
1675 }
1676
1677 __ StackOverflowCheck(r5, ip, &stack_overflow);
1678
1679 // Push the arguments.
1680 GenerateInterpreterPushArgs(masm, r5, r4, r6);
1681
1682 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1683 __ PushRoot(RootIndex::kUndefinedValue);
1684 }
1685
1686 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1687 // Pass the spread in the register r2.
1688 // r2 already points to the penultimate argument, the spread
1689 // lies in the next interpreter register.
1690 __ LoadU64(r4, MemOperand(r4, -kSystemPointerSize));
1691 }
1692
1693 // Call the target.
1694 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1695 __ TailCallBuiltin(Builtin::kCallWithSpread);
1696 } else {
1697 __ TailCallBuiltin(Builtins::Call(receiver_mode));
1698 }
1699
1700 __ bind(&stack_overflow);
1701 {
1702 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1703 // Unreachable Code.
1704 __ bkpt(0);
1705 }
1706}
1707
1708// static
1709void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1710 MacroAssembler* masm, InterpreterPushArgsMode mode) {
1711 // ----------- S t a t e -------------
1712 // -- r2 : argument count
1713 // -- r5 : new target
1714 // -- r3 : constructor to call
1715 // -- r4 : allocation site feedback if available, undefined otherwise.
1716 // -- r6 : address of the first argument
1717 // -----------------------------------
1718 Label stack_overflow;
1719 __ StackOverflowCheck(r2, ip, &stack_overflow);
1720
1721 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1722 // The spread argument should not be pushed.
1723 __ SubS64(r2, r2, Operand(1));
1724 }
1725
1726 Register argc_without_receiver = ip;
1727 __ SubS64(argc_without_receiver, r2, Operand(kJSArgcReceiverSlots));
1728 // Push the arguments. r4 and r5 will be modified.
1729 GenerateInterpreterPushArgs(masm, argc_without_receiver, r6, r7);
1730
1731 // Push a slot for the receiver to be constructed.
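  // The zero pushed here is only a placeholder for that slot; as a Smi it is
  // safe for the GC until the construct path installs the real receiver.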
1732 __ mov(r0, Operand::Zero());
1733 __ push(r0);
1734
1735 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1736 // Pass the spread in register r4.
1737 // r6 already points to the penultimate argument; the spread
1738 // lies in the next interpreter register.
1739 __ lay(r6, MemOperand(r6, -kSystemPointerSize));
1740 __ LoadU64(r4, MemOperand(r6));
1741 } else {
1742 __ AssertUndefinedOrAllocationSite(r4, r7);
1743 }
1744
1745 if (mode == InterpreterPushArgsMode::kArrayFunction) {
1746 __ AssertFunction(r3);
1747
1748 // Tail call to the array construct stub (still in the caller
1749 // context at this point).
1750 __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
1751 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1752 // Call the constructor with r2, r3, and r5 unmodified.
1753 __ TailCallBuiltin(Builtin::kConstructWithSpread);
1754 } else {
1755 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1756 // Call the constructor with r2, r3, and r5 unmodified.
1757 __ TailCallBuiltin(Builtin::kConstruct);
1758 }
1759
1760 __ bind(&stack_overflow);
1761 {
1762 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1763 // Unreachable Code.
1764 __ bkpt(0);
1765 }
1766}
1767
1768// static
1769void Builtins::Generate_ConstructForwardAllArgsImpl(
1770 MacroAssembler* masm, ForwardWhichFrame which_frame) {
1771 // ----------- S t a t e -------------
1772 // -- r5 : new target
1773 // -- r3 : constructor to call
1774 // -----------------------------------
1775 Label stack_overflow;
1776
1777 // Load the frame pointer into r6.
1778 switch (which_frame) {
1779 case ForwardWhichFrame::kCurrentFrame:
1780 __ mov(r6, fp);
1781 break;
1782 case ForwardWhichFrame::kParentFrame:
1783 __ LoadU64(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1784 break;
1785 }
1786
1787 // Load the argument count into r2.
1788 __ LoadU64(r2, MemOperand(r6, StandardFrameConstants::kArgCOffset));
1789 __ StackOverflowCheck(r2, ip, &stack_overflow);
1790
1791 // Point r6 to the base of the argument list to forward, excluding the
1792 // receiver.
1793 __ AddS64(r6, r6,
1796
1797 // Copy arguments on the stack. r5 is a scratch register.
1798 Register argc_without_receiver = ip;
1799 __ SubS64(argc_without_receiver, r2, Operand(kJSArgcReceiverSlots));
1800 __ PushArray(r6, argc_without_receiver, r1, r7);
1801
1802 // Push a slot for the receiver.
1803 __ mov(r0, Operand::Zero());
1804 __ push(r0);
1805
1806 // Call the constructor with r2, r5, and r3 unmodified.
1807 __ TailCallBuiltin(Builtin::kConstruct);
1808
1809 __ bind(&stack_overflow);
1810 {
1811 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1812 // Unreachable Code.
1813 __ bkpt(0);
1814 }
1815}
1816
1817namespace {
1818
1819void NewImplicitReceiver(MacroAssembler* masm) {
1820 // ----------- S t a t e -------------
1821 // -- r2 : argument count
1822 // -- r3 : constructor to call (checked to be a JSFunction)
1823 // -- r5 : new target
1824 //
1825 // Stack:
1826 // -- Implicit Receiver
1827 // -- [arguments without receiver]
1828 // -- Implicit Receiver
1829 // -- Context
1830 // -- FastConstructMarker
1831 // -- FramePointer
1832 // -----------------------------------
1833 Register implicit_receiver = r6;
1834
1835 // Save live registers.
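  // The raw argument count is Smi-tagged first so that the GC only ever sees
  // properly tagged values spilled across the FastNewObject call.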
1836 __ SmiTag(r2);
1837 __ Push(r2, r3, r5);
1838 __ CallBuiltin(Builtin::kFastNewObject);
1839 // Save result.
1840 __ Move(implicit_receiver, r2);
1841 // Restore live registers.
1842 __ Pop(r2, r3, r5);
1843 __ SmiUntag(r2);
1844
1845 // Patch implicit receiver (in arguments)
1846 __ StoreU64(implicit_receiver, MemOperand(sp, 0 * kSystemPointerSize));
1847 // Patch second implicit receiver (in construct frame).
1848 __ StoreU64(
1849 implicit_receiver,
1850 MemOperand(fp, FastConstructFrameConstants::kImplicitReceiverOffset));
1851
1852 // Restore context.
1853 __ LoadU64(cp, MemOperand(fp, FastConstructFrameConstants::kContextOffset));
1854}
1855
1856} // namespace
1857
1858// static
1859void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
1860 MacroAssembler* masm) {
1861 // ----------- S t a t e -------------
1862 // -- r2 : argument count
1863 // -- r3 : constructor to call (checked to be a JSFunction)
1864 // -- r5 : new target
1865 // -- r6 : address of the first argument
1866 // -- cp/r13 : context pointer
1867 // -----------------------------------
1868 __ AssertFunction(r3);
1869
1870 // Check if target has a [[Construct]] internal method.
1871 Label non_constructor;
1872 __ LoadMap(r4, r3);
1873 __ LoadU8(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
1874 __ TestBit(r4, Map::Bits1::IsConstructorBit::kShift);
1875 __ beq(&non_constructor);
1876
1877 // Add a stack check before pushing arguments.
1878 Label stack_overflow;
1879 __ StackOverflowCheck(r2, r4, &stack_overflow);
1880
1881 // Enter a construct frame.
1882 FrameScope scope(masm, StackFrame::MANUAL);
1883 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
1884 // Implicit receiver stored in the construct frame.
1885 __ LoadRoot(r4, RootIndex::kTheHoleValue);
1886 __ Push(cp, r4);
1887
1888 // Push arguments + implicit receiver.
1889 Register argc_without_receiver = r8;
1890 __ SubS64(argc_without_receiver, r2, Operand(kJSArgcReceiverSlots));
1891 // Push the arguments. r6 and r7 will be modified.
1892 GenerateInterpreterPushArgs(masm, argc_without_receiver, r6, r7);
1893 // Implicit receiver as part of the arguments (patched later if needed).
1894 __ push(r4);
1895
1896 // Check if it is a builtin call.
1897 Label builtin_call;
1898 __ LoadTaggedField(
1899 r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
1900 __ LoadU32(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
1901 __ AndP(r0, r4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1902 __ bne(&builtin_call);
1903
1904 // Check if we need to create an implicit receiver.
1905 Label not_create_implicit_receiver;
1906 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r4);
1907 __ JumpIfIsInRange(
1908 r4, r4, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
1909 static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
1910 &not_create_implicit_receiver);
1911 NewImplicitReceiver(masm);
1912 __ bind(&not_create_implicit_receiver);
1913
1914 // Call the function.
1915 __ InvokeFunctionWithNewTarget(r3, r5, r2, InvokeType::kCall);
1916
1917 // ----------- S t a t e -------------
1918 // -- r0 constructor result
1919 //
1920 // Stack:
1921 // -- Implicit Receiver
1922 // -- Context
1923 // -- FastConstructMarker
1924 // -- FramePointer
1925 // -----------------------------------
1926
1927 // Store offset of return address for deoptimizer.
1928 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
1929 masm->pc_offset());
1930
1931 // If the result is an object (in the ECMA sense), we should get rid
1932 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
1933 // on page 74.
1934 Label use_receiver, do_throw, leave_and_return, check_receiver;
1935
1936 // If the result is undefined, we jump out to using the implicit receiver.
1937 __ JumpIfNotRoot(r2, RootIndex::kUndefinedValue, &check_receiver);
1938
1939 // Otherwise we do a smi check and fall through to check if the return value
1940 // is a valid receiver.
1941
1942 // Throw away the result of the constructor invocation and use the
1943 // on-stack receiver as the result.
1944 __ bind(&use_receiver);
1945 __ LoadU64(
1946 r2, MemOperand(fp, FastConstructFrameConstants::kImplicitReceiverOffset));
1947 __ JumpIfRoot(r2, RootIndex::kTheHoleValue, &do_throw);
1948
1949 __ bind(&leave_and_return);
1950 // Leave construct frame.
1951 __ LeaveFrame(StackFrame::CONSTRUCT);
1952 __ Ret();
1953
1954 __ bind(&check_receiver);
1955 // If the result is a smi, it is *not* an object in the ECMA sense.
1956 __ JumpIfSmi(r2, &use_receiver);
1957
1958 // If the type of the result (stored in its map) is less than
1959 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
1960 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1961 __ CompareObjectType(r2, r6, r7, FIRST_JS_RECEIVER_TYPE);
1962 __ bge(&leave_and_return);
1963 __ b(&use_receiver);
1964
1965 __ bind(&builtin_call);
1966 // TODO(victorgomes): Check the possibility to turn this into a tailcall.
1967 __ InvokeFunctionWithNewTarget(r3, r5, r2, InvokeType::kCall);
1968 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1969 __ Ret();
1970
1971 __ bind(&do_throw);
1972 // Restore the context from the frame.
1973 __ LoadU64(cp, MemOperand(fp, FastConstructFrameConstants::kContextOffset));
1974 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
1975 __ bkpt(0);
1976
1977 __ bind(&stack_overflow);
1978 // Restore the context from the frame.
1979 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1980 // Unreachable code.
1981 __ bkpt(0);
1982
1983 // Called Construct on an Object that doesn't have a [[Construct]] internal
1984 // method.
1985 __ bind(&non_constructor);
1986 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
1987}
1988
1989static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1990 // Set the return address to the correct point in the interpreter entry
1991 // trampoline.
1992 Label builtin_trampoline, trampoline_loaded;
1993 Tagged<Smi> interpreter_entry_return_pc_offset(
1994 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1995 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1996
1997 // If the SFI function_data is an InterpreterData, the function will have a
1998 // custom copy of the interpreter entry trampoline for profiling. If so,
1999 // get the custom trampoline, otherwise grab the entry address of the global
2000 // trampoline.
2001 __ LoadU64(r4, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
2002 __ LoadTaggedField(
2003 r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2004 __ LoadTaggedField(
2005 r4, FieldMemOperand(r4, SharedFunctionInfo::kTrustedFunctionDataOffset));
2006 __ CompareObjectType(r4, kInterpreterDispatchTableRegister,
2007 kInterpreterDispatchTableRegister,
2008 INTERPRETER_DATA_TYPE);
2009 __ bne(&builtin_trampoline);
2010
2011 __ LoadTaggedField(
2012 r4, FieldMemOperand(r4, InterpreterData::kInterpreterTrampolineOffset));
2013 __ LoadCodeInstructionStart(r4, r4);
2014 __ b(&trampoline_loaded);
2015
2016 __ bind(&builtin_trampoline);
2017 __ Move(r4, ExternalReference::
2018 address_of_interpreter_entry_trampoline_instruction_start(
2019 masm->isolate()));
2020 __ LoadU64(r4, MemOperand(r4));
2021
2022 __ bind(&trampoline_loaded);
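  // r14 is the return-address register on s390x: point it into the entry
  // trampoline so the bytecode handler returns to the right spot.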
2023 __ AddS64(r14, r4, Operand(interpreter_entry_return_pc_offset.value()));
2024
2025 // Initialize the dispatch table register.
2026 __ Move(
2027 kInterpreterDispatchTableRegister,
2028 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
2029
2030 // Get the bytecode array pointer from the frame.
2031 __ LoadU64(kInterpreterBytecodeArrayRegister,
2032 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
2033
2034 if (v8_flags.debug_code) {
2035 // Check function data field is actually a BytecodeArray object.
2036 __ TestIfSmi(kInterpreterBytecodeArrayRegister);
2037 __ Assert(
2038 ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
2039 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
2040 BYTECODE_ARRAY_TYPE);
2041 __ Assert(
2042 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
2043 }
2044
2045 // Get the target bytecode offset from the frame.
2046 __ LoadU64(kInterpreterBytecodeOffsetRegister,
2047 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
2048 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
2049
2050 if (v8_flags.debug_code) {
2051 Label okay;
2054 __ bge(&okay);
2055 __ bkpt(0);
2056 __ bind(&okay);
2057 }
2058
2059 // Dispatch to the target bytecode.
2060 UseScratchRegisterScope temps(masm);
2061 Register scratch = temps.Acquire();
2064 __ ShiftLeftU64(scratch, scratch, Operand(kSystemPointerSizeLog2));
2068}
2069
2070void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
2071 // Get bytecode array and bytecode offset from the stack frame.
2077
2078 Label enter_bytecode, function_entry_bytecode;
2082 __ beq(&function_entry_bytecode);
2083
2084 // Load the current bytecode.
2087
2088 // Advance to the next bytecode.
2089 Label if_return;
2092 &if_return);
2093
2094 __ bind(&enter_bytecode);
2095 // Convert new bytecode offset to a Smi and save in the stackframe.
2097 __ StoreU64(r4,
2099
2101
2102 __ bind(&function_entry_bytecode);
2103 // If the code deoptimizes during the implicit function entry stack interrupt
2104 // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
2105 // not a valid bytecode offset. Detect this case and advance to the first
2106 // actual bytecode.
2109 __ b(&enter_bytecode);
2110
2111 // We should never take the if_return path.
2112 __ bind(&if_return);
2113 __ Abort(AbortReason::kInvalidBytecodeAdvance);
2114}
2115
2116void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
2117 Generate_InterpreterEnterBytecode(masm);
2118}
2119
2120namespace {
2121void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
2122 bool javascript_builtin,
2123 bool with_result) {
2124 const RegisterConfiguration* config(RegisterConfiguration::Default());
2125 int allocatable_register_count = config->num_allocatable_general_registers();
2126 Register scratch = ip;
2127 if (with_result) {
2128 if (javascript_builtin) {
2129 __ mov(scratch, r2);
2130 } else {
2131 // Overwrite the hole inserted by the deoptimizer with the return value
2132 // from the LAZY deopt point.
2133 __ StoreU64(
2134 r2, MemOperand(
2135 sp, config->num_allocatable_general_registers() *
2136 kSystemPointerSize +
2137 BuiltinContinuationFrameConstants::kFixedFrameSize));
2138 }
2139 }
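  // The deoptimizer pushed all allocatable general registers above; pop them
  // back in reverse allocation order.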
2140 for (int i = allocatable_register_count - 1; i >= 0; --i) {
2141 int code = config->GetAllocatableGeneralCode(i);
2142 __ Pop(Register::from_code(code));
2143 if (javascript_builtin && code == kJavaScriptCallArgCountRegister.code()) {
2144 __ SmiUntag(Register::from_code(code));
2145 }
2146 }
2147 if (javascript_builtin && with_result) {
2148 // Overwrite the hole inserted by the deoptimizer with the return value from
2149 // the LAZY deopt point. r0 contains the arguments count, the return value
2150 // from LAZY is always the last argument.
2151 constexpr int return_value_offset =
2152 BuiltinContinuationFrameConstants::kFixedSlotCount -
2153 kJSArgcReceiverSlots;
2154 __ AddS64(r2, r2, Operand(return_value_offset));
2155 __ ShiftLeftU64(r1, r2, Operand(kSystemPointerSizeLog2));
2156 __ StoreU64(scratch, MemOperand(sp, r1));
2157 // Recover arguments count.
2158 __ SubS64(r2, r2, Operand(return_value_offset));
2159 }
2160 __ LoadU64(
2161 fp,
2162 MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
2163 // Load builtin index (stored as a Smi) and use it to get the builtin start
2164 // address from the builtins table.
2165 UseScratchRegisterScope temps(masm);
2166 Register builtin = temps.Acquire();
2167 __ Pop(builtin);
2168 __ AddS64(sp, sp,
2169 Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
2170 __ Pop(r0);
2171 __ mov(r14, r0);
2172 __ LoadEntryFromBuiltinIndex(builtin, builtin);
2173 __ Jump(builtin);
2174}
2175} // namespace
2176
2177void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
2178 Generate_ContinueToBuiltinHelper(masm, false, false);
2179}
2180
2181void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
2182 MacroAssembler* masm) {
2183 Generate_ContinueToBuiltinHelper(masm, false, true);
2184}
2185
2186void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
2187 Generate_ContinueToBuiltinHelper(masm, true, false);
2188}
2189
2190void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
2191 MacroAssembler* masm) {
2192 Generate_ContinueToBuiltinHelper(masm, true, true);
2193}
2194
2195void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
2196 {
2197 FrameScope scope(masm, StackFrame::INTERNAL);
2198 __ CallRuntime(Runtime::kNotifyDeoptimized);
2199 }
2200
2201 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code());
2202 __ pop(r2);
2203 __ Ret();
2204}
2205
2206// static
2207void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
2208 // ----------- S t a t e -------------
2209 // -- r2 : argc
2210 // -- sp[0] : receiver
2211 // -- sp[4] : thisArg
2212 // -- sp[8] : argArray
2213 // -----------------------------------
2214
2215 // 1. Load receiver into r3, argArray into r4 (if present), remove all
2216 // arguments from the stack (including the receiver), and push thisArg (if
2217 // present) instead.
2218 {
2219 __ LoadRoot(r7, RootIndex::kUndefinedValue);
2220 __ mov(r4, r7);
2221 Label done;
2222
2223 __ LoadU64(r3, MemOperand(sp)); // receiver
2224 __ CmpS64(r2, Operand(JSParameterCount(1)));
2225 __ blt(&done);
2226 __ LoadU64(r7, MemOperand(sp, kSystemPointerSize)); // thisArg
2227 __ CmpS64(r2, Operand(JSParameterCount(2)));
2228 __ blt(&done);
2229 __ LoadU64(r4, MemOperand(sp, 2 * kSystemPointerSize)); // argArray
2230
2231 __ bind(&done);
2232 __ DropArgumentsAndPushNewReceiver(r2, r7);
2233 }
2234
2235 // ----------- S t a t e -------------
2236 // -- r4 : argArray
2237 // -- r3 : receiver
2238 // -- sp[0] : thisArg
2239 // -----------------------------------
2240
2241 // 2. We don't need to check explicitly for callable receiver here,
2242 // since that's the first thing the Call/CallWithArrayLike builtins
2243 // will do.
2244
2245 // 3. Tail call with no arguments if argArray is null or undefined.
2246 Label no_arguments;
2247 __ JumpIfRoot(r4, RootIndex::kNullValue, &no_arguments);
2248 __ JumpIfRoot(r4, RootIndex::kUndefinedValue, &no_arguments);
2249
2250 // 4a. Apply the receiver to the given argArray.
2251 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2252
2253 // 4b. The argArray is either null or undefined, so we tail call without any
2254 // arguments to the receiver.
2255 __ bind(&no_arguments);
2256 {
2257 __ mov(r2, Operand(JSParameterCount(0)));
2258 __ TailCallBuiltin(Builtins::Call());
2259 }
2260}
2261
2262// static
2263void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2264 // 1. Get the callable to call (passed as receiver) from the stack.
2265 __ Pop(r3);
2266
2267 // 2. Make sure we have at least one argument.
2268 // r2: actual number of arguments
2269 {
2270 Label done;
2271 __ CmpS64(r2, Operand(JSParameterCount(0)));
2272 __ b(ne, &done);
2273 __ PushRoot(RootIndex::kUndefinedValue);
2274 __ AddS64(r2, r2, Operand(1));
2275 __ bind(&done);
2276 }
2277
2278 // 3. Adjust the actual number of arguments.
2279 __ SubS64(r2, r2, Operand(1));
2280
2281 // 4. Call the callable.
2282 __ TailCallBuiltin(Builtins::Call());
2283}
2284
2285void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2286 // ----------- S t a t e -------------
2287 // -- r2 : argc
2288 // -- sp[0] : receiver
2289 // -- sp[4] : target (if argc >= 1)
2290 // -- sp[8] : thisArgument (if argc >= 2)
2291 // -- sp[12] : argumentsList (if argc == 3)
2292 // -----------------------------------
2293
2294 // 1. Load target into r3 (if present), argumentsList into r4 (if present),
2295 // remove all arguments from the stack (including the receiver), and push
2296 // thisArgument (if present) instead.
2297 {
2298 __ LoadRoot(r3, RootIndex::kUndefinedValue);
2299 __ mov(r7, r3);
2300 __ mov(r4, r3);
2301
2302 Label done;
2303
2304 __ CmpS64(r2, Operand(JSParameterCount(1)));
2305 __ blt(&done);
2306 __ LoadU64(r3, MemOperand(sp, kSystemPointerSize)); // target
2307 __ CmpS64(r2, Operand(JSParameterCount(2)));
2308 __ blt(&done);
2309 __ LoadU64(r7, MemOperand(sp, 2 * kSystemPointerSize)); // thisArgument
2310 __ CmpS64(r2, Operand(JSParameterCount(3)));
2311 __ blt(&done);
2312 __ LoadU64(r4, MemOperand(sp, 3 * kSystemPointerSize)); // argumentsList
2313
2314 __ bind(&done);
2315 __ DropArgumentsAndPushNewReceiver(r2, r7);
2316 }
2317
2318 // ----------- S t a t e -------------
2319 // -- r4 : argumentsList
2320 // -- r3 : target
2321 // -- sp[0] : thisArgument
2322 // -----------------------------------
2323
2324 // 2. We don't need to check explicitly for callable target here,
2325 // since that's the first thing the Call/CallWithArrayLike builtins
2326 // will do.
2327
2328 // 3 Apply the target to the given argumentsList.
2329 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2330}
2331
2332void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2333 // ----------- S t a t e -------------
2334 // -- r2 : argc
2335 // -- sp[0] : receiver
2336 // -- sp[4] : target
2337 // -- sp[8] : argumentsList
2338 // -- sp[12] : new.target (optional)
2339 // -----------------------------------
2340
2341 // 1. Load target into r3 (if present), argumentsList into r4 (if present),
2342 // new.target into r5 (if present, otherwise use target), remove all
2343 // arguments from the stack (including the receiver), and push thisArgument
2344 // (if present) instead.
2345 {
2346 __ LoadRoot(r3, RootIndex::kUndefinedValue);
2347 __ mov(r4, r3);
2348
2349 Label done;
2350
2351 __ mov(r6, r3);
2352 __ CmpS64(r2, Operand(JSParameterCount(1)));
2353 __ blt(&done);
2354 __ LoadU64(r3, MemOperand(sp, kSystemPointerSize)); // target
2355 __ mov(r5, r3);
2356 __ CmpS64(r2, Operand(JSParameterCount(2)));
2357 __ blt(&done);
2358 __ LoadU64(r4, MemOperand(sp, 2 * kSystemPointerSize)); // argumentsList
2359 __ CmpS64(r2, Operand(JSParameterCount(3)));
2360 __ blt(&done);
2361 __ LoadU64(r5, MemOperand(sp, 3 * kSystemPointerSize)); // new.target
2362 __ bind(&done);
2363 __ DropArgumentsAndPushNewReceiver(r2, r6);
2364 }
2365
2366 // ----------- S t a t e -------------
2367 // -- r4 : argumentsList
2368 // -- r5 : new.target
2369 // -- r3 : target
2370 // -- sp[0] : receiver (undefined)
2371 // -----------------------------------
2372
2373 // 2. We don't need to check explicitly for constructor target here,
2374 // since that's the first thing the Construct/ConstructWithArrayLike
2375 // builtins will do.
2376
2377 // 3. We don't need to check explicitly for constructor new.target here,
2378 // since that's the second thing the Construct/ConstructWithArrayLike
2379 // builtins will do.
2380
2381 // 4. Construct the target with the given new.target and argumentsList.
2382 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
2383}
2384
2385namespace {
2386
2387// Allocate new stack space for |count| arguments and shift all existing
2388// arguments already on the stack. |pointer_to_new_space_out| points to the
2389// first free slot on the stack to copy additional arguments to and
2390// |argc_in_out| is updated to include |count|.
2391void Generate_AllocateSpaceAndShiftExistingArguments(
2392 MacroAssembler* masm, Register count, Register argc_in_out,
2393 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2394 DCHECK(!AreAliased(count, argc_in_out, pointer_to_new_space_out, scratch1,
2395 scratch2));
2396 Register old_sp = scratch1;
2397 Register new_space = scratch2;
2398 __ mov(old_sp, sp);
2399 __ ShiftLeftU64(new_space, count, Operand(kSystemPointerSizeLog2));
2400 __ AllocateStackSpace(new_space);
2401
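  // Copy the existing arguments (including the receiver) from the old stack
  // area down into the newly reserved space, one slot at a time.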
2402 Register end = scratch2;
2403 Register value = r1;
2404 Register dest = pointer_to_new_space_out;
2405 __ mov(dest, sp);
2406 __ ShiftLeftU64(r0, argc_in_out, Operand(kSystemPointerSizeLog2));
2407 __ AddS64(end, old_sp, r0);
2408 Label loop, done;
2409 __ bind(&loop);
2410 __ CmpS64(old_sp, end);
2411 __ bge(&done);
2412 __ LoadU64(value, MemOperand(old_sp));
2413 __ lay(old_sp, MemOperand(old_sp, kSystemPointerSize));
2414 __ StoreU64(value, MemOperand(dest));
2415 __ lay(dest, MemOperand(dest, kSystemPointerSize));
2416 __ b(&loop);
2417 __ bind(&done);
2418
2419 // Update total number of arguments.
2420 __ AddS64(argc_in_out, argc_in_out, count);
2421}
2422
2423} // namespace
2424
2425// static
2426// TODO(v8:11615): Observe Code::kMaxArguments in CallOrConstructVarargs
2427void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
2428 Builtin target_builtin) {
2429 // ----------- S t a t e -------------
2430 // -- r3 : target
2431 // -- r2 : number of parameters on the stack
2432 // -- r4 : arguments list (a FixedArray)
2433 // -- r6 : len (number of elements to push from args)
2434 // -- r5 : new.target (for [[Construct]])
2435 // -----------------------------------
2436
2437 Register scratch = ip;
2438
2439 if (v8_flags.debug_code) {
2440 // Allow r4 to be a FixedArray, or a FixedDoubleArray if r6 == 0.
2441 Label ok, fail;
2442 __ AssertNotSmi(r4);
2443 __ LoadTaggedField(scratch, FieldMemOperand(r4, HeapObject::kMapOffset));
2444 __ LoadS16(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2445 __ CmpS64(scratch, Operand(FIXED_ARRAY_TYPE));
2446 __ beq(&ok);
2447 __ CmpS64(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
2448 __ bne(&fail);
2449 __ CmpS64(r6, Operand::Zero());
2450 __ beq(&ok);
2451 // Fall through.
2452 __ bind(&fail);
2453 __ Abort(AbortReason::kOperandIsNotAFixedArray);
2454
2455 __ bind(&ok);
2456 }
2457
2458 // Check for stack overflow.
2459 Label stack_overflow;
2460 __ StackOverflowCheck(r6, scratch, &stack_overflow);
2461
2462 // Move the arguments already in the stack,
2463 // including the receiver and the return address.
2464 // r6: Number of arguments to make room for.
2465 // r2: Number of arguments already on the stack.
2466 // r7: Points to first free slot on the stack after arguments were shifted.
2467 Generate_AllocateSpaceAndShiftExistingArguments(masm, r6, r2, r7, ip, r8);
2468
2469 // Push arguments onto the stack (thisArgument is already on the stack).
2470 {
2471 Label loop, no_args, skip;
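    // Walk the FixedArray element by element; any the-hole entry is replaced
    // with undefined before being pushed.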
2472 __ CmpS64(r6, Operand::Zero());
2473 __ beq(&no_args);
2474 __ AddS64(r4, r4,
2475 Operand(OFFSET_OF_DATA_START(FixedArray) - kHeapObjectTag -
2476 kTaggedSize));
2477 __ mov(r1, r6);
2478 __ bind(&loop);
2479 __ LoadTaggedField(scratch, MemOperand(r4, kTaggedSize), r0);
2480 __ la(r4, MemOperand(r4, kTaggedSize));
2481 __ CompareRoot(scratch, RootIndex::kTheHoleValue);
2482 __ bne(&skip, Label::kNear);
2483 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
2484 __ bind(&skip);
2485 __ StoreU64(scratch, MemOperand(r7));
2486 __ lay(r7, MemOperand(r7, kSystemPointerSize));
2487 __ BranchOnCount(r1, &loop);
2488 __ bind(&no_args);
2489 }
2490
2491 // Tail-call to the actual Call or Construct builtin.
2492 __ TailCallBuiltin(target_builtin);
2493
2494 __ bind(&stack_overflow);
2495 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2496}
2497
2498// static
2499void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2500 CallOrConstructMode mode,
2501 Builtin target_builtin) {
2502 // ----------- S t a t e -------------
2503 // -- r2 : the number of arguments
2504 // -- r5 : the new.target (for [[Construct]] calls)
2505 // -- r3 : the target to call (can be any Object)
2506 // -- r4 : start index (to support rest parameters)
2507 // -----------------------------------
2508
2509 Register scratch = r8;
2510
2511 if (mode == CallOrConstructMode::kConstruct) {
2512 Label new_target_constructor, new_target_not_constructor;
2513 __ JumpIfSmi(r5, &new_target_not_constructor);
2514 __ LoadTaggedField(scratch, FieldMemOperand(r5, HeapObject::kMapOffset));
2515 __ LoadU8(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
2516 __ tmll(scratch, Operand(Map::Bits1::IsConstructorBit::kShift));
2517 __ bne(&new_target_constructor);
2518 __ bind(&new_target_not_constructor);
2519 {
2520 FrameScope scope(masm, StackFrame::MANUAL);
2521 __ EnterFrame(StackFrame::INTERNAL);
2522 __ Push(r5);
2523 __ CallRuntime(Runtime::kThrowNotConstructor);
2524 __ Trap(); // Unreachable.
2525 }
2526 __ bind(&new_target_constructor);
2527 }
2528
2529 Label stack_done, stack_overflow;
2530 __ LoadU64(r7, MemOperand(fp, StandardFrameConstants::kArgCOffset));
2531 __ SubS64(r7, r7, Operand(kJSArgcReceiverSlots));
2532 __ SubS64(r7, r7, r4);
2533 __ ble(&stack_done);
2534 {
2535 // ----------- S t a t e -------------
2536 // -- r2 : the number of arguments already in the stack
2537 // -- r3 : the target to call (can be any Object)
2538 // -- r4 : start index (to support rest parameters)
2539 // -- r5 : the new.target (for [[Construct]] calls)
2540 // -- r6 : point to the caller stack frame
2541 // -- r7 : number of arguments to copy, i.e. arguments count - start index
2542 // -----------------------------------
2543
2544 // Check for stack overflow.
2545 __ StackOverflowCheck(r7, scratch, &stack_overflow);
2546
2547 // Forward the arguments from the caller frame.
2548 __ mov(r5, r5);
2549 // Point to the first argument to copy (skipping the receiver).
2550 __ AddS64(r6, fp,
2553 __ ShiftLeftU64(scratch, r4, Operand(kSystemPointerSizeLog2));
2554 __ AddS64(r6, r6, scratch);
2555
2556 // Move the arguments already in the stack,
2557 // including the receiver and the return address.
2558 // r7: Number of arguments to make room for.
2559 // r2: Number of arguments already on the stack.
2560 // r4: Points to first free slot on the stack after arguments were shifted.
2561 Generate_AllocateSpaceAndShiftExistingArguments(masm, r7, r2, r4, scratch,
2562 ip);
2563
2564 // Copy arguments from the caller frame.
2565 // TODO(victorgomes): Consider using forward order as potentially more cache
2566 // friendly.
2567 {
2568 Label loop;
2569 __ bind(&loop);
2570 {
2571 __ SubS64(r7, r7, Operand(1));
2572 __ ShiftLeftU64(r1, r7, Operand(kSystemPointerSizeLog2));
2573 __ LoadU64(scratch, MemOperand(r6, r1));
2574 __ StoreU64(scratch, MemOperand(r4, r1));
2575 __ CmpS64(r7, Operand::Zero());
2576 __ bne(&loop);
2577 }
2578 }
2579 }
2580 __ bind(&stack_done);
2581 // Tail-call to the actual Call or Construct builtin.
2582 __ TailCallBuiltin(target_builtin);
2583
2584 __ bind(&stack_overflow);
2585 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2586}
2587
2588// static
2589void Builtins::Generate_CallFunction(MacroAssembler* masm,
2590 ConvertReceiverMode mode) {
2591 // ----------- S t a t e -------------
2592 // -- r2 : the number of arguments
2593 // -- r3 : the function to call (checked to be a JSFunction)
2594 // -----------------------------------
2595 __ AssertCallableFunction(r3);
2596
2597 __ LoadTaggedField(
2598 r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2599
2600 // Enter the context of the function; ToObject has to run in the function
2601 // context, and we also need to take the global proxy from the function
2602 // context in case of conversion.
2603 __ LoadTaggedField(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
2604 // We need to convert the receiver for non-native sloppy mode functions.
2605 Label done_convert;
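  // Strict-mode and native functions take the receiver as-is, so the flag
  // test below skips the conversion for them.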
2606 __ LoadU32(r5, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
2607 __ AndP(r0, r5,
2608 Operand(SharedFunctionInfo::IsStrictBit::kMask |
2609 SharedFunctionInfo::IsNativeBit::kMask));
2610 __ bne(&done_convert);
2611 {
2612 // ----------- S t a t e -------------
2613 // -- r2 : the number of arguments
2614 // -- r3 : the function to call (checked to be a JSFunction)
2615 // -- r4 : the shared function info.
2616 // -- cp : the function context.
2617 // -----------------------------------
2618
2619 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2620 // Patch receiver to global proxy.
2621 __ LoadGlobalProxy(r5);
2622 } else {
2623 Label convert_to_object, convert_receiver;
2624 __ LoadReceiver(r5);
2625 __ JumpIfSmi(r5, &convert_to_object);
2626 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2627 __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
2628 __ bge(&done_convert);
2629 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2630 Label convert_global_proxy;
2631 __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &convert_global_proxy);
2632 __ JumpIfNotRoot(r5, RootIndex::kNullValue, &convert_to_object);
2633 __ bind(&convert_global_proxy);
2634 {
2635 // Patch receiver to global proxy.
2636 __ LoadGlobalProxy(r5);
2637 }
2638 __ b(&convert_receiver);
2639 }
2640 __ bind(&convert_to_object);
2641 {
2642 // Convert receiver using ToObject.
2643 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2644 // in the fast case? (fall back to AllocateInNewSpace?)
2645 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2646 __ SmiTag(r2);
2647 __ Push(r2, r3);
2648 __ mov(r2, r5);
2649 __ Push(cp);
2650 __ CallBuiltin(Builtin::kToObject);
2651 __ Pop(cp);
2652 __ mov(r5, r2);
2653 __ Pop(r2, r3);
2654 __ SmiUntag(r2);
2655 }
2656 __ LoadTaggedField(
2657 r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2658 __ bind(&convert_receiver);
2659 }
2660 __ StoreReceiver(r5);
2661 }
2662 __ bind(&done_convert);
2663
2664 // ----------- S t a t e -------------
2665 // -- r2 : the number of arguments
2666 // -- r3 : the function to call (checked to be a JSFunction)
2667 // -- r4 : the shared function info.
2668 // -- cp : the function context.
2669 // -----------------------------------
2670
2671 __ LoadU16(
2672 r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
2673 __ InvokeFunctionCode(r3, no_reg, r4, r2, InvokeType::kJump);
2674}
2675
2676namespace {
2677
2678void Generate_PushBoundArguments(MacroAssembler* masm) {
2679 // ----------- S t a t e -------------
2680 // -- r2 : the number of arguments
2681 // -- r3 : target (checked to be a JSBoundFunction)
2682 // -- r5 : new.target (only in case of [[Construct]])
2683 // -----------------------------------
2684
2685 // Load [[BoundArguments]] into r4 and length of that into r6.
2686 Label no_bound_arguments;
2687 __ LoadTaggedField(
2688 r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
2689 __ SmiUntagField(r6, FieldMemOperand(r4, offsetof(FixedArray, length_)));
2690 __ LoadAndTestP(r6, r6);
2691 __ beq(&no_bound_arguments);
2692 {
2693 // ----------- S t a t e -------------
2694 // -- r2 : the number of arguments
2695 // -- r3 : target (checked to be a JSBoundFunction)
2696 // -- r4 : the [[BoundArguments]] (implemented as FixedArray)
2697 // -- r5 : new.target (only in case of [[Construct]])
2698 // -- r6 : the number of [[BoundArguments]]
2699 // -----------------------------------
2700
2701 Register scratch = r8;
2702 // Reserve stack space for the [[BoundArguments]].
2703 {
2704 Label done;
2705 __ ShiftLeftU64(scratch, r6, Operand(kSystemPointerSizeLog2));
2706 __ SubS64(r1, sp, scratch);
2707 // Check the stack for overflow. We are not trying to catch interruptions
2708 // (i.e. debug break and preemption) here, so check the "real stack
2709 // limit".
2710 __ CmpU64(r1, __ StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
2711 __ bgt(&done); // Unsigned comparison.
2712 // Restore the stack pointer.
2713 {
2714 FrameScope scope(masm, StackFrame::MANUAL);
2715 __ EnterFrame(StackFrame::INTERNAL);
2716 __ CallRuntime(Runtime::kThrowStackOverflow);
2717 }
2718 __ bind(&done);
2719 }
2720
2721 // Pop receiver.
2722 __ Pop(r7);
2723
2724 // Push [[BoundArguments]].
2725 {
2726 Label loop, done;
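      // Push the [[BoundArguments]] from last to first so that, once the
      // receiver is re-pushed below, they sit on the stack in argument order.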
2727 __ AddS64(r2, r2, r6); // Adjust effective number of arguments.
2728 __ AddS64(r4, r4,
2729 Operand(OFFSET_OF_DATA_START(FixedArray) - kHeapObjectTag));
2730
2731 __ bind(&loop);
2732 __ SubS64(r1, r6, Operand(1));
2733 __ ShiftLeftU64(r1, r1, Operand(kTaggedSizeLog2));
2734 __ LoadTaggedField(scratch, MemOperand(r4, r1), r0);
2735 __ Push(scratch);
2736 __ SubS64(r6, r6, Operand(1));
2737 __ bgt(&loop);
2738 __ bind(&done);
2739 }
2740
2741 // Push receiver.
2742 __ Push(r7);
2743 }
2744 __ bind(&no_bound_arguments);
2745}
2746
2747} // namespace
2748
2749// static
2750void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2751 // ----------- S t a t e -------------
2752 // -- r2 : the number of arguments
2753 // -- r3 : the function to call (checked to be a JSBoundFunction)
2754 // -----------------------------------
2755 __ AssertBoundFunction(r3);
2756
2757 // Patch the receiver to [[BoundThis]].
2758 __ LoadTaggedField(r5,
2759 FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
2760 __ StoreReceiver(r5);
2761
2762 // Push the [[BoundArguments]] onto the stack.
2763 Generate_PushBoundArguments(masm);
2764
2765 // Call the [[BoundTargetFunction]] via the Call builtin.
2766 __ LoadTaggedField(
2767 r3, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2768 __ TailCallBuiltin(Builtins::Call());
2769}
2770
2771// static
2772void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2773 // ----------- S t a t e -------------
2774 // -- r2 : the number of arguments
2775 // -- r3 : the target to call (can be any Object).
2776 // -----------------------------------
2777 Register target = r3;
2778 Register map = r6;
2779 Register instance_type = r7;
2780 Register scratch = r8;
2781 DCHECK(!AreAliased(r2, target, map, instance_type));
2782
2783 Label non_callable, class_constructor;
2784 __ JumpIfSmi(target, &non_callable);
2785 __ LoadMap(map, target);
2786 __ CompareInstanceTypeRange(map, instance_type, scratch,
2787 FIRST_CALLABLE_JS_FUNCTION_TYPE,
2788 LAST_CALLABLE_JS_FUNCTION_TYPE);
2789 __ TailCallBuiltin(Builtins::CallFunction(mode), le);
2790 __ CmpS64(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
2791 __ TailCallBuiltin(Builtin::kCallBoundFunction, eq);
2792
2793 // Check if target has a [[Call]] internal method.
2794 {
2795 Register flags = r6;
2796 __ LoadU8(flags, FieldMemOperand(map, Map::kBitFieldOffset));
2797 map = no_reg;
2798 __ TestBit(flags, Map::Bits1::IsCallableBit::kShift);
2799 __ beq(&non_callable);
2800 }
2801
2802 // Check if target is a proxy and call CallProxy external builtin
2803 __ CmpS64(instance_type, Operand(JS_PROXY_TYPE));
2804 __ TailCallBuiltin(Builtin::kCallProxy, eq);
2805
2806 // Check if target is a wrapped function and call CallWrappedFunction external
2807 // builtin
2808 __ CmpS64(instance_type, Operand(JS_WRAPPED_FUNCTION_TYPE));
2809 __ TailCallBuiltin(Builtin::kCallWrappedFunction, eq);
2810
2811 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2812 // Check that the function is not a "classConstructor".
2813 __ CmpS64(instance_type, Operand(JS_CLASS_CONSTRUCTOR_TYPE));
2814 __ beq(&class_constructor);
2815
2816 // 2. Call to something else, which might have a [[Call]] internal method (if
2817 // not we raise an exception).
2818 // Overwrite the original receiver with the (original) target.
2819 __ StoreReceiver(target);
2820 // Let the "call_as_function_delegate" take care of the rest.
2821 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2822 __ TailCallBuiltin(
2823 Builtins::CallFunction(ConvertReceiverMode::kNotNullOrUndefined));
2824
2825 // 3. Call to something that is not callable.
2826 __ bind(&non_callable);
2827 {
2828 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2829 __ Push(target);
2830 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2831 __ Trap(); // Unreachable.
2832 }
2833
2834 // 4. The function is a "classConstructor", need to raise an exception.
2835 __ bind(&class_constructor);
2836 {
2837 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2838 __ Push(target);
2839 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2840 __ Trap(); // Unreachable.
2841 }
2842}
2843
2844// static
2845void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2846 // ----------- S t a t e -------------
2847 // -- r2 : the number of arguments
2848 // -- r3 : the constructor to call (checked to be a JSFunction)
2849 // -- r5 : the new target (checked to be a constructor)
2850 // -----------------------------------
2851 __ AssertConstructor(r3, r1);
2852 __ AssertFunction(r3);
2853
2854 // The calling convention for function-specific ConstructStubs requires
2855 // r4 to contain either an AllocationSite or undefined.
2856 __ LoadRoot(r4, RootIndex::kUndefinedValue);
2857
2858 Label call_generic_stub;
2859
2860 // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2861 __ LoadTaggedField(
2862 r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2863 __ LoadU32(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
2864 __ AndP(r6, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2865 __ beq(&call_generic_stub);
2866
2867 __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);
2868
2869 __ bind(&call_generic_stub);
2870 __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
2871}
2872
2873// static
2874void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2875 // ----------- S t a t e -------------
2876 // -- r2 : the number of arguments
2877 // -- r3 : the function to call (checked to be a JSBoundFunction)
2878 // -- r5 : the new target (checked to be a constructor)
2879 // -----------------------------------
2880 __ AssertConstructor(r3, r1);
2881 __ AssertBoundFunction(r3);
2882
2883 // Push the [[BoundArguments]] onto the stack.
2884 Generate_PushBoundArguments(masm);
2885
2886 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2887 Label skip;
2888 __ CompareTagged(r3, r5);
2889 __ bne(&skip);
2890 __ LoadTaggedField(
2891 r5, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2892 __ bind(&skip);
2893
2894 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2895 __ LoadTaggedField(
2896 r3, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2897 __ TailCallBuiltin(Builtin::kConstruct);
2898}
2899
2900// static
2901void Builtins::Generate_Construct(MacroAssembler* masm) {
2902 // ----------- S t a t e -------------
2903 // -- r2 : the number of arguments
2904 // -- r3 : the constructor to call (can be any Object)
2905 // -- r5 : the new target (either the same as the constructor or
2906 // the JSFunction on which new was invoked initially)
2907 // -----------------------------------
2908 Register target = r3;
2909 Register map = r6;
2910 Register instance_type = r7;
2911 Register scratch = r8;
2912 DCHECK(!AreAliased(r2, target, map, instance_type, scratch));
2913
2914 // Check if target is a Smi.
2915 Label non_constructor, non_proxy;
2916 __ JumpIfSmi(target, &non_constructor);
2917
2918 // Check if target has a [[Construct]] internal method.
2919 __ LoadTaggedField(map, FieldMemOperand(target, HeapObject::kMapOffset));
2920 {
2921 Register flags = r4;
2922 DCHECK(!AreAliased(r2, target, map, instance_type, flags));
2923 __ LoadU8(flags, FieldMemOperand(map, Map::kBitFieldOffset));
2924 __ TestBit(flags, Map::Bits1::IsConstructorBit::kShift);
2925 __ beq(&non_constructor);
2926 }
2927
2928 // Dispatch based on instance type.
2929 __ CompareInstanceTypeRange(map, instance_type, scratch,
2930 FIRST_JS_FUNCTION_TYPE, LAST_JS_FUNCTION_TYPE);
2931 __ TailCallBuiltin(Builtin::kConstructFunction, le);
2932
2933 // Only dispatch to bound functions after checking whether they are
2934 // constructors.
2935 __ CmpS64(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
2936 __ TailCallBuiltin(Builtin::kConstructBoundFunction, eq);
2937
2938 // Only dispatch to proxies after checking whether they are constructors.
2939 __ CmpS64(instance_type, Operand(JS_PROXY_TYPE));
2940 __ bne(&non_proxy);
2941 __ TailCallBuiltin(Builtin::kConstructProxy);
2942
2943 // Called Construct on an exotic Object with a [[Construct]] internal method.
2944 __ bind(&non_proxy);
2945 {
2946 // Overwrite the original receiver with the (original) target.
2947 __ StoreReceiver(target);
2948 // Let the "call_as_constructor_delegate" take care of the rest.
2949 __ LoadNativeContextSlot(target,
2950 Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
2951 __ TailCallBuiltin(Builtins::CallFunction());
2952 }
2953
2954 // Called Construct on an Object that doesn't have a [[Construct]] internal
2955 // method.
2956 __ bind(&non_constructor);
2957 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
2958}
2959
2960#ifdef V8_ENABLE_MAGLEV
2961
2962void Builtins::Generate_MaglevFunctionEntryStackCheck(MacroAssembler* masm,
2963 bool save_new_target) {
2964 // Input (r2): Stack size (Smi).
2965 // This builtin can be invoked just after Maglev's prologue.
2966 // All registers are available, except (possibly) new.target.
2967 ASM_CODE_COMMENT(masm);
2968 {
2969 FrameScope scope(masm, StackFrame::INTERNAL);
2970 __ AssertSmi(r2);
2971 if (save_new_target) {
2972 __ Push(kJavaScriptCallNewTargetRegister);
2973 }
2974 __ Push(r2);
2975 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
2976 if (save_new_target) {
2977 __ Pop(kJavaScriptCallNewTargetRegister);
2978 }
2979 }
2980 __ Ret();
2981}
2982
2983#endif // V8_ENABLE_MAGLEV
2984
2985#if V8_ENABLE_WEBASSEMBLY
2986
2987struct SaveWasmParamsScope {
2988 explicit SaveWasmParamsScope(MacroAssembler* masm) : masm(masm) {
2989 for (Register gp_param_reg : wasm::kGpParamRegisters) {
2990 gp_regs.set(gp_param_reg);
2991 }
2992 for (DoubleRegister fp_param_reg : wasm::kFpParamRegisters) {
2993 fp_regs.set(fp_param_reg);
2994 }
2995
2996 CHECK_EQ(gp_regs.Count(), arraysize(wasm::kGpParamRegisters));
2997 CHECK_EQ(fp_regs.Count(), arraysize(wasm::kFpParamRegisters));
2998 CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedGpParamRegs + 1,
2999 gp_regs.Count());
3000 CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs,
3001 fp_regs.Count());
3002
3003 __ MultiPush(gp_regs);
3004 __ MultiPushF64OrV128(fp_regs, r1);
3005 }
3006 ~SaveWasmParamsScope() {
3007 __ MultiPopF64OrV128(fp_regs, r1);
3008 __ MultiPop(gp_regs);
3009 }
3010
3011 RegList gp_regs;
3012 DoubleRegList fp_regs;
3013 MacroAssembler* masm;
3014};
3015
3016void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
3017 Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
3018 Register vector = ip;
3019 Register scratch = r0;
3020 Label allocate_vector, done;
3021
3022 __ LoadTaggedField(
3023 vector, FieldMemOperand(kWasmImplicitArgRegister,
3024 WasmTrustedInstanceData::kFeedbackVectorsOffset));
3025 __ ShiftLeftU64(scratch, func_index, Operand(kTaggedSizeLog2));
3026 __ AddS64(vector, vector, scratch);
3027 __ LoadTaggedField(vector,
3028 FieldMemOperand(vector, OFFSET_OF_DATA_START(FixedArray)));
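  // A Smi entry means no feedback vector has been allocated for this
  // function yet.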
3029 __ JumpIfSmi(vector, &allocate_vector);
3030 __ bind(&done);
3032 __ push(vector);
3033 __ Ret();
3034
3035 __ bind(&allocate_vector);
3036
3037 // Feedback vector doesn't exist yet. Call the runtime to allocate it.
3038 // We temporarily change the frame type for this, because we need special
3039 // handling by the stack walker in case of GC.
3040 __ mov(scratch,
3041 Operand(StackFrame::TypeToMarker(StackFrame::WASM_LIFTOFF_SETUP)));
3042 __ StoreU64(scratch, MemOperand(sp));
3043
3044 // Save current return address as it will get clobbered during CallRuntime.
3045 __ push(r14);
3046 {
3047 SaveWasmParamsScope save_params(masm);
3048 // Arguments to the runtime function: instance data, func_index.
3049 __ push(kWasmImplicitArgRegister);
3050 __ SmiTag(func_index);
3051 __ push(func_index);
3052 // Allocate a stack slot where the runtime function can spill a pointer
3053 // to the {NativeModule}.
3054 __ push(r10);
3055 __ LoadSmiLiteral(cp, Smi::zero());
3056 __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
3057 __ mov(vector, kReturnRegister0);
3058 // Saved parameters are restored at the end of this block.
3059 }
3060 __ pop(r14);
3061
3062 __ mov(scratch, Operand(StackFrame::TypeToMarker(StackFrame::WASM)));
3063 __ StoreU64(scratch, MemOperand(sp));
3064 __ b(&done);
3065}
3066
3067void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
3068 // The function index was put in a register by the jump table trampoline.
3069 // Convert to Smi for the runtime call.
3070 __ SmiTag(kWasmCompileLazyFuncIndexRegister);
3071
3072 {
3073 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
3074 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
3075
3076 {
3077 SaveWasmParamsScope save_params(masm);
3078
3079 // Push the instance data as an explicit argument to the runtime function.
3080 __ push(kWasmImplicitArgRegister);
3081 // Push the function index as second argument.
3082 __ push(kWasmCompileLazyFuncIndexRegister);
3083 // Initialize the JavaScript context with 0. CEntry will use it to
3084 // set the current context on the isolate.
3085 __ LoadSmiLiteral(cp, Smi::zero());
3086 __ CallRuntime(Runtime::kWasmCompileLazy, 2);
3087 // The runtime function returns the jump table slot offset as a Smi. Use
3088 // that to compute the jump target in ip.
3089 __ SmiUntag(kReturnRegister0);
3090 __ mov(ip, kReturnRegister0);
3091
3092 // Saved parameters are restored at the end of this block.
3093 }
3094
3095 // After the instance data register has been restored, we can add the jump
3096 // table start to the jump table offset already stored in ip.
3097 __ LoadU64(r0,
3098 FieldMemOperand(kWasmImplicitArgRegister,
3099 WasmTrustedInstanceData::kJumpTableStartOffset));
3100 __ AddS64(ip, ip, r0);
3101 }
3102
3103 // Finally, jump to the jump table slot for the function.
3104 __ Jump(ip);
3105}
3106
3107void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
3108 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
3109 {
3110 FrameAndConstantPoolScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
3111
3112 // Save all parameter registers. They might hold live values, we restore
3113 // them after the runtime call.
3114 __ MultiPush(WasmDebugBreakFrameConstants::kPushedGpRegs);
3115 __ MultiPushF64OrV128(WasmDebugBreakFrameConstants::kPushedFpRegs, ip);
3116
3117 // Initialize the JavaScript context with 0. CEntry will use it to
3118 // set the current context on the isolate.
3119 __ LoadSmiLiteral(cp, Smi::zero());
3120 __ CallRuntime(Runtime::kWasmDebugBreak, 0);
3121
3122 // Restore registers.
3123 __ MultiPopF64OrV128(WasmDebugBreakFrameConstants::kPushedFpRegs, ip);
3124 __ MultiPop(WasmDebugBreakFrameConstants::kPushedGpRegs);
3125 }
3126 __ Ret();
3127}
3128
3129namespace {
3130// Check that the stack was in the old state (if generated code assertions are
3131// enabled), and switch to the new state.
3132void SwitchStackState(MacroAssembler* masm, Register stack, Register tmp,
3133 wasm::JumpBuffer::StackState old_state,
3134 wasm::JumpBuffer::StackState new_state) {
3135 __ LoadU32(tmp, MemOperand(stack, wasm::kStackStateOffset));
3136 Label ok;
3137 __ JumpIfEqual(tmp, old_state, &ok);
3138 __ Trap();
3139 __ bind(&ok);
3140 __ mov(tmp, Operand(new_state));
3141 __ StoreU32(tmp, MemOperand(stack, wasm::kStackStateOffset));
3142}
3143
3144// Switch the stack pointer.
3145void SwitchStackPointer(MacroAssembler* masm, Register stack) {
3146 __ LoadU64(sp, MemOperand(stack, wasm::kStackSpOffset));
3147}
3148
3149void FillJumpBuffer(MacroAssembler* masm, Register stack, Label* target,
3150 Register tmp) {
3151 __ mov(tmp, sp);
3152 __ StoreU64(tmp, MemOperand(stack, wasm::kStackSpOffset));
3153 __ StoreU64(fp, MemOperand(stack, wasm::kStackFpOffset));
3154 __ LoadStackLimit(tmp, StackLimitKind::kRealStackLimit);
3155 __ StoreU64(tmp, MemOperand(stack, wasm::kStackLimitOffset));
3156
3157 __ GetLabelAddress(tmp, target);
3158 // Stash the address in the jump buffer.
3159 __ StoreU64(tmp, MemOperand(stack, wasm::kStackPcOffset));
3160}
3161
3162void LoadJumpBuffer(MacroAssembler* masm, Register stack, bool load_pc,
3163 Register tmp, wasm::JumpBuffer::StackState expected_state) {
3164 SwitchStackPointer(masm, stack);
3165 __ LoadU64(fp, MemOperand(stack, wasm::kStackFpOffset));
3166 SwitchStackState(masm, stack, tmp, expected_state, wasm::JumpBuffer::Active);
3167 if (load_pc) {
3168 __ LoadU64(tmp, MemOperand(stack, wasm::kStackPcOffset));
3169 __ Jump(tmp);
3170 }
3171 // The stack limit in StackGuard is set separately under the ExecutionAccess
3172 // lock.
3173}
3174
3175void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
3176 Register tmp,
3177 wasm::JumpBuffer::StackState expected_state) {
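  // Nothing in this frame needs to be visited by the GC while the target
  // stack is entered, so clear the scan-slot count first.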
3178 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3179 // Switch stack!
3180 LoadJumpBuffer(masm, target_stack, false, tmp, expected_state);
3181}
3182
3183// Updates the stack limit and central stack info, and validates the switch.
3184void SwitchStacks(MacroAssembler* masm, Register old_stack, bool return_switch,
3185 const std::initializer_list<Register> keep) {
3186 using ER = ExternalReference;
3187
3188 for (auto reg : keep) {
3189 __ Push(reg);
3190 }
3191
3192 {
3193 __ PrepareCallCFunction(2, r0);
3194 FrameScope scope(masm, StackFrame::MANUAL);
3195 // Move {old_stack} first in case it aliases kCArgRegs[0].
3196 __ Move(kCArgRegs[1], old_stack);
3197 __ Move(kCArgRegs[0], ExternalReference::isolate_address(masm->isolate()));
3198 __ CallCFunction(
3199 return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
3200 }
3201
3202 for (auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
3203 __ Pop(*it);
3204 }
3205}
3206
3207void ReloadParentStack(MacroAssembler* masm, Register return_reg,
3208 Register return_value, Register context, Register tmp1,
3209 Register tmp2, Register tmp3) {
3210 Register active_stack = tmp1;
3211 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3212
3213 // Set a null pointer in the jump buffer's SP slot to indicate to the stack
3214 // frame iterator that this stack is empty.
3215 __ Zero(MemOperand(active_stack, wasm::kStackSpOffset));
3216 {
3217 UseScratchRegisterScope temps(masm);
3218 Register scratch = temps.Acquire();
3219 SwitchStackState(masm, active_stack, scratch, wasm::JumpBuffer::Active,
3221 }
3222 Register parent = tmp2;
3223 __ LoadU64(parent, MemOperand(active_stack, wasm::kStackParentOffset));
3224
3225 // Update active stack.
3226 __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
3227
3228 // Switch stack!
3229 SwitchStacks(masm, active_stack, true,
3230 {return_reg, return_value, context, parent});
3231 LoadJumpBuffer(masm, parent, false, tmp3, wasm::JumpBuffer::Inactive);
3232}
3233
3234void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
3235 Register suspender = tmp1;
3236 __ LoadRoot(suspender, RootIndex::kActiveSuspender);
3237 __ LoadTaggedField(
3238 suspender,
3239 FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
3240
3241 int32_t active_suspender_offset =
3242 MacroAssembler::RootRegisterOffsetForRootIndex(
3243 RootIndex::kActiveSuspender);
3244 __ StoreU64(suspender, MemOperand(kRootRegister, active_suspender_offset));
3245}
3246
3247void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
3248 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset),
3249 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3250}
3251
3252class RegisterAllocator {
3253 public:
3254 class Scoped {
3255 public:
3256 Scoped(RegisterAllocator* allocator, Register* reg)
3257 : allocator_(allocator), reg_(reg) {}
3258 ~Scoped() { allocator_->Free(reg_); }
3259
3260 private:
3261 RegisterAllocator* allocator_;
3262 Register* reg_;
3263 };
3264
3265 explicit RegisterAllocator(const RegList& registers)
3266 : initial_(registers), available_(registers) {}
3267 void Ask(Register* reg) {
3268 DCHECK_EQ(*reg, no_reg);
3269 DCHECK(!available_.is_empty());
3270 *reg = available_.PopFirst();
3271 allocated_registers_.push_back(reg);
3272 }
3273
3274 bool registerIsAvailable(const Register& reg) { return available_.has(reg); }
3275
3276 void Pinned(const Register& requested, Register* reg) {
3277 DCHECK(registerIsAvailable(requested));
3278 *reg = requested;
3279 Reserve(requested);
3280 allocated_registers_.push_back(reg);
3281 }
3282
3283 void Free(Register* reg) {
3284 DCHECK_NE(*reg, no_reg);
3285 available_.set(*reg);
3286 *reg = no_reg;
3287 allocated_registers_.erase(
3288 find(allocated_registers_.begin(), allocated_registers_.end(), reg));
3289 }
3290
3291 void Reserve(const Register& reg) {
3292 if (reg == no_reg) {
3293 return;
3294 }
3295 DCHECK(registerIsAvailable(reg));
3296 available_.clear(reg);
3297 }
3298
3299 void Reserve(const Register& reg1, const Register& reg2,
3300 const Register& reg3 = no_reg, const Register& reg4 = no_reg,
3301 const Register& reg5 = no_reg, const Register& reg6 = no_reg) {
3302 Reserve(reg1);
3303 Reserve(reg2);
3304 Reserve(reg3);
3305 Reserve(reg4);
3306 Reserve(reg5);
3307 Reserve(reg6);
3308 }
3309
3310 bool IsUsed(const Register& reg) {
3311 return initial_.has(reg) && !registerIsAvailable(reg);
3312 }
3313
3314 void ResetExcept(const Register& reg1 = no_reg, const Register& reg2 = no_reg,
3315 const Register& reg3 = no_reg, const Register& reg4 = no_reg,
3316 const Register& reg5 = no_reg,
3317 const Register& reg6 = no_reg) {
3318 available_ = initial_;
3319 available_.clear(reg1);
3320 available_.clear(reg2);
3321 available_.clear(reg3);
3322 available_.clear(reg4);
3323 available_.clear(reg5);
3324 available_.clear(reg6);
3325
3326 auto it = allocated_registers_.begin();
3327 while (it != allocated_registers_.end()) {
3328 if (registerIsAvailable(**it)) {
3329 **it = no_reg;
3330 it = allocated_registers_.erase(it);
3331 } else {
3332 it++;
3333 }
3334 }
3335 }
3336
3337 static RegisterAllocator WithAllocatableGeneralRegisters() {
3338 RegList list;
3339 const RegisterConfiguration* config(RegisterConfiguration::Default());
3340
3341 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
3342 int code = config->GetAllocatableGeneralCode(i);
3343 Register candidate = Register::from_code(code);
3344 list.set(candidate);
3345 }
3346 return RegisterAllocator(list);
3347 }
3348
3349 private:
3350 std::vector<Register*> allocated_registers_;
3351 const RegList initial_;
3352 RegList available_;
3353};
3354
3355#define DEFINE_REG(Name) \
3356 Register Name = no_reg; \
3357 regs.Ask(&Name);
3358
3359#define DEFINE_REG_W(Name) \
3360 DEFINE_REG(Name); \
3361 Name = Name.W();
3362
3363#define ASSIGN_REG(Name) regs.Ask(&Name);
3364
3365#define ASSIGN_REG_W(Name) \
3366 ASSIGN_REG(Name); \
3367 Name = Name.W();
3368
3369#define DEFINE_PINNED(Name, Reg) \
3370 Register Name = no_reg; \
3371 regs.Pinned(Reg, &Name);
3372
3373#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);
3374
3375#define DEFINE_SCOPED(Name) \
3376 DEFINE_REG(Name) \
3377 RegisterAllocator::Scoped scope_##Name(&regs, &Name);
3378
3379#define FREE_REG(Name) regs.Free(&Name);
3380
3381// Loads the context field of the WasmTrustedInstanceData or WasmImportData
3382// depending on the data's type, and places the result in the input register.
3383void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
3384 Register scratch) {
3385 __ LoadTaggedField(scratch, FieldMemOperand(data, HeapObject::kMapOffset));
3386 __ CompareInstanceType(scratch, scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);
3387 Label instance;
3388 Label end;
3389 __ beq(&instance);
3390 __ LoadTaggedField(
3391 data, FieldMemOperand(data, WasmImportData::kNativeContextOffset));
3392 __ jmp(&end);
3393 __ bind(&instance);
3394 __ LoadTaggedField(
3395 data,
3396 FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
3397 __ bind(&end);
3398}
3399
3400} // namespace
3401
3402void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3403 // Push registers in reverse order so that they are on the stack like
3404 // in an array, with the first item being at the lowest address.
3405 DoubleRegList fp_regs;
3406 for (DoubleRegister fp_param_reg : wasm::kFpParamRegisters) {
3407 fp_regs.set(fp_param_reg);
3408 }
3409 __ MultiPushDoubles(fp_regs);
3410
3411 // Push the GP registers in reverse order so that they are on the stack like
3412 // in an array, with the first item being at the lowest address.
3413 RegList gp_regs;
3414 for (size_t i = arraysize(wasm::kGpParamRegisters) - 1; i > 0; --i) {
3415 gp_regs.set(wasm::kGpParamRegisters[i]);
3416 }
3417 __ MultiPush(gp_regs);
3418 // Reserve a slot for the signature.
3419 __ Push(r2);
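 // Stack layout at the tail call, from lowest to highest address: the
 // signature slot just pushed, then the pushed GP parameter registers, then
 // the FP parameter registers, i.e. the array-like layout described above.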
3420 __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
3421}
3422
3423void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
3424 __ Trap();
3425}
3426
3427void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3428 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3429 // Set up the stackframe.
3430 __ EnterFrame(StackFrame::STACK_SWITCH);
3431
3432 DEFINE_PINNED(suspender, r2);
3434
3435 __ SubS64(
3436 sp, sp,
3437 Operand(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize));
3438 // Set a sentinel value for the spill slots visited by the GC.
3439 ResetStackSwitchFrameStackSlots(masm);
3440
3441 // -------------------------------------------
3442 // Save current state in active jump buffer.
3443 // -------------------------------------------
3444 Label resume;
3445 DEFINE_REG(stack);
3446 __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3447 DEFINE_REG(scratch);
3448 FillJumpBuffer(masm, stack, &resume, scratch);
3449 SwitchStackState(masm, stack, scratch, wasm::JumpBuffer::Active,
3450 wasm::JumpBuffer::Suspended);
3451 regs.ResetExcept(suspender, stack);
3452
3453 DEFINE_REG(suspender_stack);
3454 __ LoadU64(suspender_stack,
3455 FieldMemOperand(suspender, WasmSuspenderObject::kStackOffset));
3456 if (v8_flags.debug_code) {
3457 // -------------------------------------------
3458 // Check that the suspender's stack is the active stack.
3459 // -------------------------------------------
3460 // TODO(thibaudm): Once we add core stack-switching instructions, this
3461 // check will not hold anymore: it's possible that the active stack
3462 // changed (due to an internal switch), so we have to update the suspender.
3463 __ CmpS64(suspender_stack, stack);
3464 Label ok;
3465 __ beq(&ok);
3466 __ Trap();
3467 __ bind(&ok);
3468 }
3469 // -------------------------------------------
3470 // Update roots.
3471 // -------------------------------------------
3472 DEFINE_REG(caller);
3473 __ LoadU64(caller, MemOperand(suspender_stack, wasm::kStackParentOffset));
3474 __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3475 DEFINE_REG(parent);
3476 __ LoadTaggedField(
3477 parent, FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
3478 int32_t active_suspender_offset =
3479 MacroAssembler::RootRegisterOffsetForRootIndex(
3480 RootIndex::kActiveSuspender);
3481 __ StoreU64(parent, MemOperand(kRootRegister, active_suspender_offset));
3482 regs.ResetExcept(suspender, caller, stack);
3483
3484 // -------------------------------------------
3485 // Load jump buffer.
3486 // -------------------------------------------
3487 SwitchStacks(masm, stack, false, {caller, suspender});
3488 FREE_REG(stack);
3489 __ LoadTaggedField(
3490 kReturnRegister0,
3491 FieldMemOperand(suspender, WasmSuspenderObject::kPromiseOffset));
3492 MemOperand GCScanSlotPlace =
3493 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3494 __ Zero(GCScanSlotPlace);
3495 ASSIGN_REG(scratch)
3496 LoadJumpBuffer(masm, caller, true, scratch, wasm::JumpBuffer::Inactive);
3497 if (v8_flags.debug_code) {
3498 __ Trap();
3499 }
3500 __ bind(&resume);
3501 __ LeaveFrame(StackFrame::STACK_SWITCH);
3502 __ b(r14);
3503}
3504
3505namespace {
3506// Resume the suspender stored in the closure. We generate two variants of this
3507// builtin: the onFulfilled variant resumes execution at the saved PC and
3508// forwards the value; the onRejected variant throws the value.
3509
3510void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
3511 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3512 __ EnterFrame(StackFrame::STACK_SWITCH);
3513
3514 DEFINE_PINNED(closure, kJSFunctionRegister); // r3
3515
3516 __ SubS64(
3517 sp, sp,
3518 Operand(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize));
3519 // Set a sentinel value for the spill slots visited by the GC.
3520 ResetStackSwitchFrameStackSlots(masm);
3521
3522 regs.ResetExcept(closure);
3523
3524 // -------------------------------------------
3525 // Load suspender from closure.
3526 // -------------------------------------------
3527 DEFINE_REG(sfi);
3528 __ LoadTaggedField(
3529 sfi,
3530 MemOperand(
3531 closure,
3532 wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction()));
3533 FREE_REG(closure);
3534 // The suspender should be in WriteBarrierDescriptor::ObjectRegister() so it
3535 // can be used in RecordWriteField calls later.
3536 DEFINE_PINNED(suspender, WriteBarrierDescriptor::ObjectRegister());
3537 DEFINE_REG(resume_data);
3538 __ LoadTaggedField(
3539 resume_data,
3540 FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
3541 __ LoadTaggedField(
3542 suspender,
3543 FieldMemOperand(resume_data, WasmResumeData::kSuspenderOffset));
3544 regs.ResetExcept(suspender);
3545
3546 // -------------------------------------------
3547 // Save current state.
3548 // -------------------------------------------
3549 Label suspend;
3550 DEFINE_REG(active_stack);
3551 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3552 DEFINE_REG(scratch);
3553 FillJumpBuffer(masm, active_stack, &suspend, scratch);
3554 SwitchStackState(masm, active_stack, scratch, wasm::JumpBuffer::Active,
3555 wasm::JumpBuffer::Inactive);
3556
3557 // -------------------------------------------
3558 // Set the suspender and stack parents and update the roots
3559 // -------------------------------------------
3560 DEFINE_REG(active_suspender);
3561 __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
3562 __ StoreTaggedField(
3563 active_suspender,
3564 FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
3565 __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
3566 active_suspender, ip, kLRHasBeenSaved,
3567 SaveFPRegsMode::kIgnore);
3568 int32_t active_suspender_offset =
3569 MacroAssembler::RootRegisterOffsetForRootIndex(
3570 RootIndex::kActiveSuspender);
3571 __ StoreU64(suspender, MemOperand(kRootRegister, active_suspender_offset));
3572
3573 // On the next line we load a field from suspender, but we have to reuse the
3574 // same register for target_stack so it can be used in RecordWriteField.
3575 // So free suspender here to reuse the pinned reg, but still load from it below.
3576 FREE_REG(suspender);
3577 DEFINE_REG(target_stack);
3578 suspender = target_stack;
3579 __ LoadU64(target_stack,
3580 FieldMemOperand(suspender, WasmSuspenderObject::kStackOffset));
3581 suspender = no_reg;
3582
3583 __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
3584 SwitchStacks(masm, active_stack, false, {target_stack});
3585 regs.ResetExcept(target_stack);
3586
3587 // -------------------------------------------
3588 // Load state from target jmpbuf (longjmp).
3589 // -------------------------------------------
3590 regs.Reserve(kReturnRegister0);
3591 ASSIGN_REG(scratch);
3592 // Move resolved value to return register.
3594 MemOperand GCScanSlotPlace =
3595 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3596 __ Zero(GCScanSlotPlace);
3597 if (on_resume == wasm::OnResume::kThrow) {
3598 // Switch without restoring the PC.
3599 LoadJumpBuffer(masm, target_stack, false, scratch,
3600 wasm::JumpBuffer::Suspended);
3601 // Pop this frame now. The unwinder expects that the first STACK_SWITCH
3602 // frame is the outermost one.
3603 __ LeaveFrame(StackFrame::STACK_SWITCH);
3604 // Forward the onRejected value to kThrow.
3605 __ Push(kReturnRegister0);
3606 __ CallRuntime(Runtime::kThrow);
3607 } else {
3608 // Resume the stack normally.
3609 LoadJumpBuffer(masm, target_stack, true, scratch,
3610 wasm::JumpBuffer::Suspended);
3611 }
3612 if (v8_flags.debug_code) {
3613 __ Trap();
3614 }
3615 __ bind(&suspend);
3616 __ LeaveFrame(StackFrame::STACK_SWITCH);
3617 // Pop receiver + parameter.
3618 __ AddS64(sp, sp, Operand(2 * kSystemPointerSize));
3619 __ b(r14);
3620}
3621} // namespace
3622
3623void Builtins::Generate_WasmResume(MacroAssembler* masm) {
3624 Generate_WasmResumeHelper(masm, wasm::OnResume::kContinue);
3625}
3626
3627void Builtins::Generate_WasmReject(MacroAssembler* masm) {
3628 Generate_WasmResumeHelper(masm, wasm::OnResume::kThrow);
3629}
3630
3631void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
3632 // Only needed on x64.
3633 __ Trap();
3634}
3635
3636namespace {
3637void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
3638 Register wasm_instance, Register wrapper_buffer,
3639 Register& original_fp, Register& new_wrapper_buffer,
3640 Label* suspend) {
3641 ResetStackSwitchFrameStackSlots(masm);
3642 DEFINE_SCOPED(scratch)
3643 DEFINE_REG(target_stack)
3644 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3645 DEFINE_REG(parent_stack)
3646 __ LoadU64(parent_stack, MemOperand(target_stack, wasm::kStackParentOffset));
3647
3648 FillJumpBuffer(masm, parent_stack, suspend, scratch);
3649 SwitchStacks(masm, parent_stack, false, {wasm_instance, wrapper_buffer});
3650
3651 FREE_REG(parent_stack);
3652 // Save the old stack's fp in r13, and use it to access the parameters in
3653 // the parent frame.
3654 regs.Pinned(r13, &original_fp);
3655 __ Move(original_fp, fp);
3656 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3657 LoadTargetJumpBuffer(masm, target_stack, scratch,
3658 wasm::JumpBuffer::Suspended);
3659 FREE_REG(target_stack);
3660
3661 // Push the loaded fp. We know it is null, because there is no frame yet,
3662 // so we could also push 0 directly. In any case we need to push it,
3663 // because this marks the base of the stack segment for
3664 // the stack frame iterator.
3665 __ EnterFrame(StackFrame::STACK_SWITCH);
3666
3667 int stack_space =
3668 RoundUp(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize +
3669 JSToWasmWrapperFrameConstants::kWrapperBufferSize,
3670 16);
3671 __ SubS64(sp, sp, Operand(stack_space));
3672 __ EnforceStackAlignment();
3673
3674 ASSIGN_REG(new_wrapper_buffer)
3675
3676 __ Move(new_wrapper_buffer, sp);
3677 // Copy data needed for return handling from old wrapper buffer to new one.
3678 // kWrapperBufferRefReturnCount will be copied too, because 8 bytes are copied
3679 // at the same time.
3680 static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
3681 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
3682
3683 __ LoadU64(
3684 scratch,
3685 MemOperand(wrapper_buffer,
3686 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3687 __ StoreU64(
3688 scratch,
3689 MemOperand(new_wrapper_buffer,
3690 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3691 __ LoadU64(
3692 scratch,
3693 MemOperand(
3694 wrapper_buffer,
3695 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3696 __ StoreU64(
3697 scratch,
3698 MemOperand(
3699 new_wrapper_buffer,
3700 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3701}
3702
3703void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
3704 wasm::Promise mode, Label* return_promise) {
3705 regs.ResetExcept();
3706 // The return value of the wasm function becomes the parameter of the
3707 // FulfillPromise builtin, and the promise is the return value of this
3708 // wrapper.
3709 static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3710 DEFINE_PINNED(promise, desc.GetRegisterParameter(0));
3711 DEFINE_PINNED(return_value, desc.GetRegisterParameter(1));
3712 DEFINE_SCOPED(tmp);
3713 DEFINE_SCOPED(tmp2);
3714 DEFINE_SCOPED(tmp3);
3715 if (mode == wasm::kPromise) {
3716 __ Move(return_value, kReturnRegister0);
3717 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3718 __ LoadTaggedField(
3719 promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
3720 }
3721
3722 __ LoadU64(kContextRegister,
3723 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3724 GetContextFromImplicitArg(masm, kContextRegister, tmp);
3725
3726 ReloadParentStack(masm, promise, return_value, kContextRegister, tmp, tmp2,
3727 tmp3);
3728 RestoreParentSuspender(masm, tmp);
3729
3730 if (mode == wasm::kPromise) {
3731 __ mov(tmp, Operand(1));
3732 __ StoreU64(
3733 tmp, MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3734 __ Push(promise);
3735 __ CallBuiltin(Builtin::kFulfillPromise);
3736 __ Pop(promise);
3737 }
3738 FREE_REG(promise);
3739 FREE_REG(return_value);
3740 __ bind(return_promise);
3741}
3742
3743void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3744 RegisterAllocator& regs,
3745 Label* return_promise) {
3746 regs.ResetExcept();
3747 static const Builtin_RejectPromise_InterfaceDescriptor desc;
3748 DEFINE_PINNED(promise, desc.GetRegisterParameter(0));
3749 DEFINE_PINNED(reason, desc.GetRegisterParameter(1));
3750 DEFINE_PINNED(debug_event, desc.GetRegisterParameter(2));
3751 int catch_handler = __ pc_offset();
3752
3753 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3754 thread_in_wasm_flag_addr = r4;
3755
3756 // Unset thread_in_wasm_flag.
3757 __ LoadU64(
3758 thread_in_wasm_flag_addr,
3759 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3760 __ mov(r0, Operand(0));
3761 __ StoreU32(r0, MemOperand(thread_in_wasm_flag_addr, 0), no_reg);
3762
3763 // The exception becomes the parameter of the RejectPromise builtin, and the
3764 // promise is the return value of this wrapper.
3765 __ Move(reason, kReturnRegister0);
3766 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3767 __ LoadTaggedField(
3768 promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
3769
3770 DEFINE_SCOPED(tmp);
3771 DEFINE_SCOPED(tmp2);
3772 DEFINE_SCOPED(tmp3);
3773 __ LoadU64(kContextRegister,
3774 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3775 GetContextFromImplicitArg(masm, kContextRegister, tmp);
3776 ReloadParentStack(masm, promise, reason, kContextRegister, tmp, tmp2, tmp3);
3777 RestoreParentSuspender(masm, tmp);
3778
3779 __ mov(tmp, Operand(1));
3780 __ StoreU64(
3781 tmp, MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3782 __ Push(promise);
3783 __ LoadRoot(debug_event, RootIndex::kTrueValue);
3784 __ CallBuiltin(Builtin::kRejectPromise);
3785 __ Pop(promise);
3786
3787 // Run the rest of the wrapper normally (deconstruct the frame, ...).
3788 __ b(return_promise);
3789
3790 masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
3791}
3792
3793void JSToWasmWrapperHelper(MacroAssembler* masm, wasm::Promise mode) {
3794 bool stack_switch = mode == wasm::kPromise || mode == wasm::kStressSwitch;
3795 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3796 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3797 : StackFrame::JS_TO_WASM);
3798
3799 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3800 kSystemPointerSize);
3801
3802 // Load the implicit argument (instance data or import data) from the frame.
3803 DEFINE_PINNED(implicit_arg, kWasmImplicitArgRegister);
3804 __ LoadU64(implicit_arg,
3805 MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3806
3807 DEFINE_PINNED(wrapper_buffer,
3808 WasmJSToWasmWrapperDescriptor::WrapperBufferRegister());
3809
3810 Label suspend;
3811 Register original_fp = no_reg;
3812 Register new_wrapper_buffer = no_reg;
3813 if (stack_switch) {
3814 SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
3815 original_fp, new_wrapper_buffer, &suspend);
3816 } else {
3817 original_fp = fp;
3818 new_wrapper_buffer = wrapper_buffer;
3819 }
3820
3821 regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
3822 new_wrapper_buffer);
3823
3824 {
3825 __ StoreU64(
3826 new_wrapper_buffer,
3827 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3828 if (stack_switch) {
3829 __ StoreU64(
3830 implicit_arg,
3831 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3832 DEFINE_SCOPED(scratch)
3833 __ LoadU64(
3834 scratch,
3835 MemOperand(original_fp,
3836 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3837 __ StoreU64(
3838 scratch,
3839 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3840 }
3841 }
3842 {
3843 DEFINE_SCOPED(result_size);
3844 __ LoadU64(
3845 result_size,
3846 MemOperand(wrapper_buffer, JSToWasmWrapperFrameConstants::
3847 kWrapperBufferStackReturnBufferSize));
3848 __ ShiftLeftU64(r0, result_size, Operand(kSystemPointerSizeLog2));
3849 __ SubS64(sp, sp, r0);
3850 }
3851
3852 __ StoreU64(
3853 sp,
3854 MemOperand(
3855 new_wrapper_buffer,
3856 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart));
3857
3858 if (stack_switch) {
3859 FREE_REG(new_wrapper_buffer)
3860 }
3861 FREE_REG(implicit_arg)
3862 for (auto reg : wasm::kGpParamRegisters) {
3863 regs.Reserve(reg);
3864 }
3865
3866 // The first GP parameter holds the trusted instance data or the import data.
3867 // This is handled specially.
3868 int stack_params_offset =
3869 (arraysize(wasm::kGpParamRegisters) - 1) * kSystemPointerSize +
3870 arraysize(wasm::kFpParamRegisters) * kDoubleSize;
3871
3872 {
3873 Register params_start = ip;
3874 __ LoadU64(
3875 params_start,
3876 MemOperand(wrapper_buffer,
3877 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3878 {
3879 // Push stack parameters on the stack.
3880 DEFINE_SCOPED(params_end);
3881 __ LoadU64(
3882 params_end,
3883 MemOperand(wrapper_buffer,
3884 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3885 DEFINE_SCOPED(last_stack_param);
3886
3887 __ AddS64(last_stack_param, params_start, Operand(stack_params_offset));
3888 Label loop_start;
3889 __ bind(&loop_start);
3890
3891 Label finish_stack_params;
3892 __ CmpS64(last_stack_param, params_end);
3893 __ bge(&finish_stack_params);
3894
3895 // Push parameter
3896 {
3897 __ AddS64(params_end, params_end, Operand(-kSystemPointerSize));
3898 __ LoadU64(r0, MemOperand(params_end));
3899 __ push(r0);
3900 }
3901 __ jmp(&loop_start);
3902
3903 __ bind(&finish_stack_params);
3904 }
3905
3906 size_t next_offset = 0;
3907 for (size_t i = 1; i < arraysize(wasm::kGpParamRegisters); i++) {
3908 // Check that {params_start} does not overlap with any of the parameter
3909 // registers, so that we don't overwrite it by accident with the loads
3910 // below.
3911 DCHECK_NE(params_start, wasm::kGpParamRegisters[i]);
3912 __ LoadU64(wasm::kGpParamRegisters[i],
3913 MemOperand(params_start, next_offset));
3914 next_offset += kSystemPointerSize;
3915 }
3916
3917 for (size_t i = 0; i < arraysize(wasm::kFpParamRegisters); i++) {
3918 __ LoadF64(wasm::kFpParamRegisters[i],
3919 MemOperand(params_start, next_offset));
3920 next_offset += kDoubleSize;
3921 }
3922 DCHECK_EQ(next_offset, stack_params_offset);
3923 }
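 // Taken together, the loads above imply the wrapper buffer's parameter area
 // layout: the GP parameters (minus the first, implicit-argument slot) come
 // first, followed by the FP parameters, with stack parameters starting at
 // stack_params_offset; the DCHECK above ties the two computations together.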
3924
3925 {
3926 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3927 __ LoadU64(thread_in_wasm_flag_addr,
3928 MemOperand(kRootRegister,
3929 Isolate::thread_in_wasm_flag_address_offset()));
3930 DEFINE_SCOPED(scratch);
3931 __ mov(scratch, Operand(1));
3932 __ StoreU32(scratch, MemOperand(thread_in_wasm_flag_addr, 0), no_reg);
3933 }
3934
3935 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3936 {
3937 DEFINE_SCOPED(call_target);
3938 __ LoadWasmCodePointer(
3939 call_target,
3940 MemOperand(wrapper_buffer,
3941 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3942 __ CallWasmCodePointer(call_target);
3943 }
3944
3945 regs.ResetExcept();
3946 // The wrapper_buffer has to be in r4 as the correct parameter register.
3947 regs.Reserve(kReturnRegister0, kReturnRegister1);
3948 ASSIGN_PINNED(wrapper_buffer, r4);
3949 {
3950 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3951 __ LoadU64(thread_in_wasm_flag_addr,
3952 MemOperand(kRootRegister,
3953 Isolate::thread_in_wasm_flag_address_offset()));
3954 __ mov(r0, Operand(0));
3955 __ StoreU32(r0, MemOperand(thread_in_wasm_flag_addr, 0), no_reg);
3956 }
3957
3958 __ LoadU64(
3959 wrapper_buffer,
3960 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3961
3962 __ StoreF64(
3963 wasm::kFpReturnRegisters[0],
3964 MemOperand(
3965 wrapper_buffer,
3966 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1));
3967 __ StoreF64(
3968 wasm::kFpReturnRegisters[1],
3969 MemOperand(
3970 wrapper_buffer,
3971 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2));
3972 __ StoreU64(
3973 wasm::kGpReturnRegisters[0],
3974 MemOperand(
3975 wrapper_buffer,
3976 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1));
3977 __ StoreU64(
3978 wasm::kGpReturnRegisters[1],
3979 MemOperand(
3980 wrapper_buffer,
3981 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2));
3982 // Call the return value builtin with
3983 // r2: wasm instance.
3984 // r3: the result JSArray for multi-return.
3985 // r4: pointer to the byte buffer which contains all parameters.
3986 if (stack_switch) {
3987 __ LoadU64(r3,
3988 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3989 __ LoadU64(r2,
3990 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3991 } else {
3992 __ LoadU64(
3993 r3,
3994 MemOperand(fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3995 __ LoadU64(
3996 r2, MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3997 }
3998 Register scratch = r5;
3999 GetContextFromImplicitArg(masm, r2, scratch);
4000
4001 __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
4002
4003 Label return_promise;
4004 if (stack_switch) {
4005 SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);
4006 }
4007 __ bind(&suspend);
4008
4009 __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
4010 : StackFrame::JS_TO_WASM);
4011 // Despite returning to a different location for the regular and stack-switching
4012 // versions, the incoming argument count matches in both cases:
4013 // instance and result array without suspend, or
4014 // promise resolve/reject params for the callback.
4015 __ AddS64(sp, sp, Operand(2 * kSystemPointerSize));
4016 __ b(r14);
4017
4018 // Catch handler for the stack-switching wrapper: reject the promise with the
4019 // thrown exception.
4020 if (mode == wasm::kPromise) {
4021 GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
4022 }
4023}
4024} // namespace
4025
4026void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
4027 JSToWasmWrapperHelper(masm, wasm::kNoPromise);
4028}
4029
4030void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
4031 JSToWasmWrapperHelper(masm, wasm::kPromise);
4032}
4033
4034void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
4035 JSToWasmWrapperHelper(masm, wasm::kStressSwitch);
4036}
4037
4038namespace {
4039
4040static constexpr Register kOldSPRegister = r13;
4041
4042void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
4043 Register target_input,
4044 Register argv_input) {
4045 using ER = ExternalReference;
4046
4047 // kOldSPRegister is used as a switch flag: if it is zero, no switch was
4048 // performed; if it is not zero, it contains the old sp value.
4049 __ mov(kOldSPRegister, Operand(0));
4050
4051 // Using r4 & r5 as temporary registers, because they will be rewritten
4052 // before exiting to native code anyway.
4053
4054 ER on_central_stack_flag_loc = ER::Create(
4055 IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
4056 __ Move(r1, on_central_stack_flag_loc);
4057 __ LoadU8(r1, MemOperand(r1));
4058
4059 Label do_not_need_to_switch;
4060 __ CmpU32(r1, Operand(0));
4061 __ bne(&do_not_need_to_switch);
4062
4063 // Switch to central stack.
4064
4065 __ Move(kOldSPRegister, sp);
4066
4067 Register central_stack_sp = r4;
4068 DCHECK(!AreAliased(central_stack_sp, argc_input, argv_input, target_input));
4069 {
4070 __ Push(argc_input);
4071 __ Push(target_input);
4072 __ Push(argv_input);
4073 __ PrepareCallCFunction(2, r0);
4074 __ Move(kCArgRegs[0], ER::isolate_address());
4075 __ Move(kCArgRegs[1], kOldSPRegister);
4076 __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
4077 SetIsolateDataSlots::kNo);
4078 __ Move(central_stack_sp, kReturnRegister0);
4079 __ Pop(argv_input);
4080 __ Pop(target_input);
4081 __ Pop(argc_input);
4082 }
4083
4084 static constexpr int kReturnAddressSlotOffset = 1 * kSystemPointerSize;
4085 static constexpr int kPadding = 1 * kSystemPointerSize;
4086 __ SubS64(sp, central_stack_sp, Operand(kReturnAddressSlotOffset + kPadding));
4087 __ EnforceStackAlignment();
4088
4089 // When we switch stack we leave home space allocated on the old stack.
4090 // Allocate home space on the central stack to prevent stack corruption.
4091 // zLinux ABI requires caller's frame to have sufficient space for callee
4092 // preserved register save area.
4094
4095 // Update the sp saved in the frame.
4096 // It will be used to calculate the callee pc during GC.
4097 // The pc is going to be on the new stack segment, so rewrite it here.
4098 __ AddS64(central_stack_sp, sp, kStackFrameSPSlot * kSystemPointerSize);
4099 __ StoreU64(central_stack_sp, MemOperand(fp, ExitFrameConstants::kSPOffset));
4100
4101 __ bind(&do_not_need_to_switch);
4102}
4103
4104void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
4105 using ER = ExternalReference;
4106
4107 Label no_stack_change;
4108
4109 __ CmpU64(kOldSPRegister, Operand(0));
4110 __ beq(&no_stack_change);
4111 __ Move(sp, kOldSPRegister);
4112
4113 {
4114 __ Push(kReturnRegister0, kReturnRegister1);
4115 __ PrepareCallCFunction(1, r0);
4116 __ Move(kCArgRegs[0], ER::isolate_address());
4117 __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
4118 SetIsolateDataSlots::kNo);
4119 __ Pop(kReturnRegister0, kReturnRegister1);
4120 }
4121
4122 __ bind(&no_stack_change);
4123}
4124
4125} // namespace
4126
4127#endif // V8_ENABLE_WEBASSEMBLY
4128
4129void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
4130 ArgvMode argv_mode, bool builtin_exit_frame,
4131 bool switch_to_central_stack) {
4132 // Called from JavaScript; parameters are on stack as if calling JS function.
4133 // r2: number of arguments including receiver
4134 // r3: pointer to builtin function
4135 // fp: frame pointer (restored after C call)
4136 // sp: stack pointer (restored as callee's sp after C call)
4137 // cp: current context (C callee-saved)
4138 //
4139 // If argv_mode == ArgvMode::kRegister:
4140 // r4: pointer to the first argument
4141
4142 using ER = ExternalReference;
4143
4144 // Move input arguments to more convenient registers.
4145 static constexpr Register argc_input = r2;
4146 static constexpr Register target_fun = r7; // C callee-saved
4147 static constexpr Register argv = r3;
4148 static constexpr Register scratch = ip;
4149#if V8_OS_ZOS
4150 static constexpr Register argc_sav = r9; // C callee-saved
4151#else
4152 static constexpr Register argc_sav = r6; // C callee-saved
4153#endif
4154
4155 __ mov(target_fun, argv);
4156
4157 if (argv_mode == ArgvMode::kRegister) {
4158 // Move argv into the correct register.
4159 __ mov(argv, r4);
4160 } else {
4161 // Compute the argv pointer.
4162 __ ShiftLeftU64(argv, argc_input, Operand(kSystemPointerSizeLog2));
4163 __ lay(argv, MemOperand(argv, sp, -kSystemPointerSize));
4164 }
4165
4166 // Enter the exit frame that transitions from JavaScript to C++.
4167 FrameScope scope(masm, StackFrame::MANUAL);
4168
4169 int arg_stack_space = 0;
4170
4171 // Pass buffer for return value on stack if necessary
4172 bool needs_return_buffer =
4173 result_size == 2 && !ABI_RETURNS_OBJECTPAIR_IN_REGS;
4174 if (needs_return_buffer) {
4175 arg_stack_space += result_size;
4176 }
4177
4178 // 64-bit Linux passes the Argument object by reference, not by value.
4179 arg_stack_space += 2;
4180
4181 __ EnterExitFrame(
4182 scratch, arg_stack_space,
4183 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
4184
4185 // Store a copy of argc, argv in callee-saved registers for later.
4186 __ mov(argc_sav, argc_input);
4187 __ mov(r8, argv);
4188 // r2: number of arguments including receiver
4189 // r6: number of arguments including receiver (C callee-saved)
4190 // r3, r8: pointer to the first argument
4191 // r7: pointer to builtin function (C callee-saved)
4192
4193 // Result returned in registers or stack, depending on result size and ABI.
4194
4195 Register isolate_reg = r4;
4196 if (needs_return_buffer) {
4197 // The return value is a 16-byte non-scalar value.
4198 // Use frame storage reserved by the calling function to pass the return
4199 // buffer as an implicit first argument in R2. Shift the original parameters
4200 // by one register each.
4201 __ mov(r4, r3);
4202 __ mov(r3, r2);
4203 __ la(r2,
4205 isolate_reg = r5;
4206 // Clang doesn't preserve r2 (result buffer)
4207 // write to r8 (preserved) before entry
4208 __ mov(r8, r2);
4209 }
4210
4211#if V8_ENABLE_WEBASSEMBLY
4212 if (switch_to_central_stack) {
4213 SwitchToTheCentralStackIfNeeded(masm, argc_input, target_fun, argv);
4214 }
4215#endif // V8_ENABLE_WEBASSEMBLY
4216
4217 // Call C built-in.
4218 __ Move(isolate_reg, ER::isolate_address());
4219
4220#if V8_OS_ZOS
4221 // Shuffle input arguments to match XPLINK ABI
4222 __ mov(r1, r2);
4223 __ mov(r2, r3);
4224 __ mov(r3, r4);
4225 // Save stack arguments to XPLINK extra param slot
4226 const int stack_args = 3;
4227 const int stack_space = kXPLINKStackFrameExtraParamSlot + stack_args;
4228 __ lay(r4, MemOperand(sp, -((stack_space * kSystemPointerSize) +
4229 kStackPointerBias)));
4230 __ StoreMultipleP(
4231 r5, target_fun,
4232 MemOperand(r4, kStackPointerBias +
4233 kXPLINKStackFrameExtraParamSlot * kSystemPointerSize));
4234 // Load environment from slot 0 of fn desc.
4235 __ LoadU64(r5, MemOperand(target_fun));
4236 // Load function pointer from slot 1 of fn desc.
4237 __ LoadU64(r8, MemOperand(target_fun, kSystemPointerSize));
4238 __ StoreReturnAddressAndCall(r8);
4239
4240 // r9 and r13 are used to store argc and argv on z/OS instead
4241 // of r6 and r8 since r6 is not callee saved.
4242 __ mov(r6, r9);
4243 __ mov(r8, r13);
4244
4245 // Shuffle arguments based on result_size to match the XPLINK ABI.
4246 if (result_size == 1) {
4247 __ mov(r2, r3);
4248 } else if (result_size == 2) {
4249 __ mov(r3, r2);
4250 __ mov(r2, r1);
4251 } else {
4252 __ mov(r4, r3);
4253 __ mov(r3, r2);
4254 __ mov(r2, r1);
4255 }
4256#else
4257 __ StoreReturnAddressAndCall(target_fun);
4258
4259 // If return value is on the stack, pop it to registers.
4260 if (needs_return_buffer) {
4261 __ mov(r2, r8);
4262 __ LoadU64(r3, MemOperand(r2, kSystemPointerSize));
4263 __ LoadU64(r2, MemOperand(r2));
4264 }
4265#endif
4266
4267 // Check result for exception sentinel.
4268 Label exception_returned;
4269 __ CompareRoot(r2, RootIndex::kException);
4270 __ beq(&exception_returned, Label::kNear);
4271
4272#if V8_ENABLE_WEBASSEMBLY
4273 if (switch_to_central_stack) {
4274 SwitchFromTheCentralStackIfNeeded(masm);
4275 }
4276#endif // V8_ENABLE_WEBASSEMBLY
4277
4278 // Check that there is no exception, otherwise we
4279 // should have returned the exception sentinel.
4280 if (v8_flags.debug_code) {
4281 Label okay;
4282 ER exception_address =
4283 ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4284 __ LoadU64(scratch,
4285 __ ExternalReferenceAsOperand(exception_address, no_reg));
4286 __ CompareRoot(scratch, RootIndex::kTheHoleValue);
4287 // Cannot use check here as it attempts to generate call into runtime.
4288 __ beq(&okay, Label::kNear);
4289 __ stop();
4290 __ bind(&okay);
4291 }
4292
4293 // Exit C frame and return.
4294 // r2:r3: result
4295 // sp: stack pointer
4296 // fp: frame pointer
4297 // r6 (r9 on z/OS): still holds argc (C callee-saved).
4298 __ LeaveExitFrame(scratch);
4299 if (argv_mode == ArgvMode::kStack) {
4300 DCHECK(!AreAliased(scratch, argc_sav));
4301 __ ShiftLeftU64(scratch, argc_sav, Operand(kSystemPointerSizeLog2));
4302 __ AddS64(sp, sp, scratch);
4303 }
4304
4305 __ b(r14);
4306
4307 // Handling of exception.
4308 __ bind(&exception_returned);
4309
4310 ER pending_handler_context_address = ER::Create(
4311 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4312 ER pending_handler_entrypoint_address = ER::Create(
4313 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4314 ER pending_handler_fp_address =
4315 ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4316 ER pending_handler_sp_address =
4317 ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4318
4319 // Ask the runtime for help to determine the handler. This will set r3 to
4320 // contain the current exception, don't clobber it.
4321 {
4322 FrameScope scope(masm, StackFrame::MANUAL);
4323 __ PrepareCallCFunction(3, 0, r2);
4324 __ mov(kCArgRegs[0], Operand::Zero());
4325 __ mov(kCArgRegs[1], Operand::Zero());
4326 __ Move(kCArgRegs[2], ER::isolate_address());
4327 __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3,
4328 SetIsolateDataSlots::kNo);
4329 }
4330
4331 // Retrieve the handler context, SP and FP.
4332 __ Move(cp, pending_handler_context_address);
4333 __ LoadU64(cp, MemOperand(cp));
4334 __ Move(sp, pending_handler_sp_address);
4335 __ LoadU64(sp, MemOperand(sp));
4336 __ Move(fp, pending_handler_fp_address);
4337 __ LoadU64(fp, MemOperand(fp));
4338
4339 // If the handler is a JS frame, restore the context to the frame. Note that
4340 // the context will be set to (cp == 0) for non-JS frames.
4341 Label skip;
4342 __ CmpS64(cp, Operand::Zero());
4343 __ beq(&skip, Label::kNear);
4344 __ StoreU64(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4345 __ bind(&skip);
4346
4347 // Clear c_entry_fp, like we do in `LeaveExitFrame`.
4348 ER c_entry_fp_address =
4349 ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4350 __ mov(scratch, Operand::Zero());
4351 __ StoreU64(scratch,
4352 __ ExternalReferenceAsOperand(c_entry_fp_address, no_reg));
4353
4354 // Compute the handler entry address and jump to it.
4355 __ LoadU64(scratch, __ ExternalReferenceAsOperand(
4356 pending_handler_entrypoint_address, no_reg));
4357 __ Jump(scratch);
4358}
4359
4360#if V8_ENABLE_WEBASSEMBLY
4361void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4362 using ER = ExternalReference;
4363 Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
4364 Register gap = WasmHandleStackOverflowDescriptor::GapRegister();
4365 {
4366 DCHECK_NE(kCArgRegs[1], frame_base);
4367 DCHECK_NE(kCArgRegs[3], frame_base);
4368 __ mov(kCArgRegs[3], gap);
4369 __ mov(kCArgRegs[1], sp);
4370 __ SubS64(kCArgRegs[2], frame_base, kCArgRegs[1]);
4371 __ mov(kCArgRegs[4], fp);
4372 FrameScope scope(masm, StackFrame::INTERNAL);
4373 __ push(kCArgRegs[3]);
4374 __ PrepareCallCFunction(5, r0);
4375 __ Move(kCArgRegs[0], ER::isolate_address());
4376 __ CallCFunction(ER::wasm_grow_stack(), 5);
4377 __ pop(gap);
4379 }
4380 Label call_runtime;
4381 // wasm_grow_stack returns zero if it cannot grow a stack.
4382 __ CmpU64(kReturnRegister0, Operand(0));
4383 __ beq(&call_runtime);
4384
4385 // Calculate old FP - SP offset to adjust FP accordingly to new SP.
4386 __ SubS64(fp, fp, sp);
4387 __ AddS64(fp, fp, kReturnRegister0);
4388 __ mov(sp, kReturnRegister0);
4389 {
4390 UseScratchRegisterScope temps(masm);
4391 Register scratch = temps.Acquire();
4392 __ mov(scratch,
4393 Operand(StackFrame::TypeToMarker(StackFrame::WASM_SEGMENT_START)));
4394 __ StoreU64(scratch, MemOperand(fp, TypedFrameConstants::kFrameTypeOffset));
4395 }
4396 __ Ret();
4397
4398 __ bind(&call_runtime);
4399 // If wasm_grow_stack returns zero, the interruption or stack overflow
4400 // should be handled by a runtime call.
4401 {
4403 MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
4404 __ LoadTaggedField(
4406 WasmTrustedInstanceData::kNativeContextOffset));
4407 FrameScope scope(masm, StackFrame::MANUAL);
4408 __ EnterFrame(StackFrame::INTERNAL);
4409 __ SmiTag(gap);
4410 __ push(gap);
4411 __ CallRuntime(Runtime::kWasmStackGuard);
4412 __ LeaveFrame(StackFrame::INTERNAL);
4413 __ Ret();
4414 }
4415}
4416#endif // V8_ENABLE_WEBASSEMBLY
4417
4418void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
4419 Label out_of_range, only_low, negate, done, fastpath_done;
4420 Register result_reg = r2;
4421
4422 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
4423
4424 // Immediate values for this stub fit in instructions, so it's safe to use ip.
4425 Register scratch = GetRegisterThatIsNotOneOf(result_reg);
4426 Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
4427 Register scratch_high =
4428 GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
4429 DoubleRegister double_scratch = kScratchDoubleReg;
4430
4431 __ Push(result_reg, scratch);
4432 // Account for saved regs.
4433 int argument_offset = 2 * kSystemPointerSize;
4434
4435 // Load double input.
4436 __ LoadF64(double_scratch, MemOperand(sp, argument_offset));
4437
4438 // Do fast-path convert from double to int.
4439 __ ConvertDoubleToInt64(result_reg, double_scratch);
4440
4441 // Test for overflow
4442 __ TestIfInt32(result_reg);
4443 __ beq(&fastpath_done, Label::kNear);
4444
4445 __ Push(scratch_high, scratch_low);
4446 // Account for saved regs.
4447 argument_offset += 2 * kSystemPointerSize;
4448
4449 __ LoadU32(scratch_high,
4450 MemOperand(sp, argument_offset + Register::kExponentOffset));
4451 __ LoadU32(scratch_low,
4452 MemOperand(sp, argument_offset + Register::kMantissaOffset));
4453
4454 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
4455 // Load scratch with exponent - 1. This is faster than loading
4456 // with exponent because Bias + 1 = 1024 which is a *S390* immediate value.
4457 static_assert(HeapNumber::kExponentBias + 1 == 1024);
4458 __ SubS64(scratch, Operand(HeapNumber::kExponentBias + 1));
4459 // If the exponent is greater than or equal to 84, the 32 least significant
4460 // bits of the integer value are zero (84 = 52 mantissa bits + 32),
4461 // so the result is 0.
4462 // Compare exponent with 84 (compare exponent - 1 with 83).
4463 __ CmpS64(scratch, Operand(83));
4464 __ bge(&out_of_range, Label::kNear);
4465
4466 // If we reach this code, 31 <= exponent <= 83.
4467 // So, we don't have to handle cases where 0 <= exponent <= 20 for
4468 // which we would need to shift right the high part of the mantissa.
4469 // Scratch contains exponent - 1.
4470 // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
4471 __ mov(r0, Operand(51));
4472 __ SubS64(scratch, r0, scratch);
4473 __ CmpS64(scratch, Operand::Zero());
4474 __ ble(&only_low, Label::kNear);
4475 // 21 <= exponent <= 51, shift scratch_low and scratch_high
4476 // to generate the result.
4477 __ ShiftRightU32(scratch_low, scratch_low, scratch);
4478 // Scratch contains: 52 - exponent.
4479 // We need: exponent - 20.
4480 // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
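 // Worked example (illustrative): for an input with exponent 40, scratch holds
 // 52 - 40 = 12 here, so scratch_low has been shifted right by 12 and
 // scratch_high (with the implicit 1 set below) will be shifted left by
 // 32 - 12 = 20, i.e. by exponent - 20, before the halves are OR'd together.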
4481 __ mov(r0, Operand(32));
4482 __ SubS64(scratch, r0, scratch);
4483 __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
4484 // Set the implicit 1 before the mantissa part in scratch_high.
4485 static_assert(HeapNumber::kMantissaBitsInTopWord >= 16);
4486 __ mov(r0, Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
4487 __ ShiftLeftU64(r0, r0, Operand(16));
4488 __ OrP(result_reg, result_reg, r0);
4489 __ ShiftLeftU32(r0, result_reg, scratch);
4490 __ OrP(result_reg, scratch_low, r0);
4491 __ b(&negate, Label::kNear);
4492
4493 __ bind(&out_of_range);
4494 __ mov(result_reg, Operand::Zero());
4495 __ b(&done, Label::kNear);
4496
4497 __ bind(&only_low);
4498 // 52 <= exponent <= 83, shift only scratch_low.
4499 // On entry, scratch contains: 52 - exponent.
4500 __ lcgr(scratch, scratch);
4501 __ ShiftLeftU32(result_reg, scratch_low, scratch);
4502
4503 __ bind(&negate);
4504 // If input was positive, scratch_high ASR 31 equals 0 and
4505 // scratch_high LSR 31 equals zero.
4506 // New result = (result eor 0) + 0 = result.
4507 // If the input was negative, we have to negate the result.
4508 // Input_high ASR 31 equals 0xFFFFFFFF and scratch_high LSR 31 equals 1.
4509 // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
4510 __ ShiftRightS32(r0, scratch_high, Operand(31));
4511 __ lgfr(r0, r0);
4512 __ ShiftRightU64(r0, r0, Operand(32));
4513 __ XorP(result_reg, r0);
4514 __ ShiftRightU32(r0, scratch_high, Operand(31));
4515 __ AddS64(result_reg, r0);
4516
4517 __ bind(&done);
4518 __ Pop(scratch_high, scratch_low);
4519 argument_offset -= 2 * kSystemPointerSize;
4520
4521 __ bind(&fastpath_done);
4522 __ StoreU64(result_reg, MemOperand(sp, argument_offset));
4523 __ Pop(result_reg, scratch);
4524
4525 __ Ret();
4526}
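// For reference, a host-side C++ sketch (illustrative only, assuming the stub
// implements ECMAScript ToInt32-style wrapping, i.e. the low 32 bits of the
// truncated value, with NaN and infinities mapping to 0):
//
//   int32_t ReferenceDoubleToInt32(double value) {
//     if (!std::isfinite(value)) return 0;        // NaN/Inf map to 0
//     double d = std::trunc(value);               // drop the fraction
//     double m = std::fmod(d, 4294967296.0);      // keep the low 32 bits
//     if (m < 0) m += 4294967296.0;
//     return static_cast<int32_t>(static_cast<uint32_t>(m));
//   }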
4527
4528void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm,
4529 CallApiCallbackMode mode) {
4530 // ----------- S t a t e -------------
4531 // CallApiCallbackMode::kOptimizedNoProfiling/kOptimized modes:
4532 // -- r4 : api function address
4533 // Both modes:
4534 // -- r4 : arguments count (not including the receiver)
4535 // -- r5 : FunctionTemplateInfo
4536 // -- cp
4537 // -- sp[0] : receiver
4538 // -- sp[8] : first argument
4539 // -- ...
4540 // -- sp[(argc) * 8] : last argument
4541 // -----------------------------------
4542
4543 Register function_callback_info_arg = kCArgRegs[0];
4544
4545 Register api_function_address = no_reg;
4546 Register argc = no_reg;
4547 Register func_templ = no_reg;
4548 Register topmost_script_having_context = no_reg;
4549 Register scratch = r6;
4550
4551 switch (mode) {
4552 case CallApiCallbackMode::kGeneric:
4553 argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4554 topmost_script_having_context = CallApiCallbackGenericDescriptor::
4555 TopmostScriptHavingContextRegister();
4556 func_templ =
4557 CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister();
4558 break;
4559
4560 case CallApiCallbackMode::kOptimizedNoProfiling:
4561 case CallApiCallbackMode::kOptimized:
4562 // Caller context is always equal to current context because we don't
4563 // inline Api calls cross-context.
4564 topmost_script_having_context = kContextRegister;
4565 api_function_address =
4566 CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4567 argc = CallApiCallbackOptimizedDescriptor::ActualArgumentsCountRegister();
4568 func_templ =
4569 CallApiCallbackOptimizedDescriptor::FunctionTemplateInfoRegister();
4570 break;
4571 }
4572 DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
4573 func_templ, scratch));
4574
4575 using FCA = FunctionCallbackArguments;
4576 using ER = ExternalReference;
4577 using FC = ApiCallbackExitFrameConstants;
4578
4579 static_assert(FCA::kArgsLength == 6);
4580 static_assert(FCA::kNewTargetIndex == 5);
4581 static_assert(FCA::kTargetIndex == 4);
4582 static_assert(FCA::kReturnValueIndex == 3);
4583 static_assert(FCA::kContextIndex == 2);
4584 static_assert(FCA::kIsolateIndex == 1);
4585 static_assert(FCA::kUnusedIndex == 0);
4586
4587 // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
4588 //
4589 // Target state:
4590 // sp[1 * kSystemPointerSize]: kUnused <= FCA::implicit_args_
4591 // sp[2 * kSystemPointerSize]: kIsolate
4592 // sp[3 * kSystemPointerSize]: kContext
4593 // sp[4 * kSystemPointerSize]: undefined (kReturnValue)
4594 // sp[5 * kSystemPointerSize]: kTarget
4595 // sp[6 * kSystemPointerSize]: undefined (kNewTarget)
4596 // Existing state:
4597 // sp[7 * kSystemPointerSize]: <= FCA:::values_
4598
4599 __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4600 topmost_script_having_context);
4601
4602 if (mode == CallApiCallbackMode::kGeneric) {
4603 api_function_address = ReassignRegister(topmost_script_having_context);
4604 }
4605
4606 // Reserve space on the stack.
4607 __ lay(sp, MemOperand(sp, -(FCA::kArgsLength * kSystemPointerSize)));
4608
4609 // kIsolate.
4610 __ Move(scratch, ER::isolate_address());
4611 __ StoreU64(scratch, MemOperand(sp, FCA::kIsolateIndex * kSystemPointerSize));
4612
4613 // kContext
4614 __ StoreU64(cp, MemOperand(sp, FCA::kContextIndex * kSystemPointerSize));
4615
4616 // kReturnValue.
4617 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4618 __ StoreU64(scratch,
4619 MemOperand(sp, FCA::kReturnValueIndex * kSystemPointerSize));
4620
4621 // kTarget.
4622 __ StoreU64(func_templ,
4623 MemOperand(sp, FCA::kTargetIndex * kSystemPointerSize));
4624
4625 // kNewTarget.
4626 __ StoreU64(scratch,
4627 MemOperand(sp, FCA::kNewTargetIndex * kSystemPointerSize));
4628
4629 // kUnused.
4630 __ StoreU64(scratch, MemOperand(sp, FCA::kUnusedIndex * kSystemPointerSize));
4631
4632 FrameScope frame_scope(masm, StackFrame::MANUAL);
4633 if (mode == CallApiCallbackMode::kGeneric) {
4634 __ LoadU64(
4635 api_function_address,
4636 FieldMemOperand(func_templ,
4637 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset));
4638 }
4639 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4640 StackFrame::API_CALLBACK_EXIT);
4641
4642 MemOperand argc_operand = MemOperand(fp, FC::kFCIArgcOffset);
4643 {
4644 ASM_CODE_COMMENT_STRING(masm, "Initialize v8::FunctionCallbackInfo");
4645 // FunctionCallbackInfo::length_.
4646 // TODO(ishell): pass JSParameterCount(argc) to simplify things on the
4647 // caller end.
4648 __ StoreU64(argc, argc_operand);
4649
4650 // FunctionCallbackInfo::implicit_args_.
4651 __ AddS64(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
4652 __ StoreU64(scratch, MemOperand(fp, FC::kFCIImplicitArgsOffset));
4653
4654 // FunctionCallbackInfo::values_ (points at JS arguments on the stack).
4655 __ AddS64(scratch, fp, Operand(FC::kFirstArgumentOffset));
4656 __ StoreU64(scratch, MemOperand(fp, FC::kFCIValuesOffset));
4657 }
4658
4659 __ RecordComment("v8::FunctionCallback's argument.");
4660 __ AddS64(function_callback_info_arg, fp,
4661 Operand(FC::kFunctionCallbackInfoOffset));
4662
4663 DCHECK(!AreAliased(api_function_address, function_callback_info_arg));
4664
4665 ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4666 Register no_thunk_arg = no_reg;
4667
4668 MemOperand return_value_operand = MemOperand(fp, FC::kReturnValueOffset);
4669 static constexpr int kSlotsToDropOnReturn =
4670 FC::kFunctionCallbackInfoArgsLength + kJSArgcReceiverSlots;
4671
4672 const bool with_profiling =
4673 mode != CallApiCallbackMode::kOptimizedNoProfiling;
4674 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4675 thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4676 &argc_operand, return_value_operand);
4677}
4678
4679void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4680 // ----------- S t a t e -------------
4681 // -- cp : context
4682 // -- r3 : receiver
4683 // -- r5 : accessor info
4684 // -- r2 : holder
4685 // -----------------------------------
4686
4687 // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
4688 // name below the exit frame to make GC aware of them.
4689 using PCA = PropertyCallbackArguments;
4690 using ER = ExternalReference;
4691 using FC = ApiAccessorExitFrameConstants;
4692
4693 static_assert(PCA::kPropertyKeyIndex == 0);
4694 static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4695 static_assert(PCA::kHolderIndex == 2);
4696 static_assert(PCA::kIsolateIndex == 3);
4697 static_assert(PCA::kHolderV2Index == 4);
4698 static_assert(PCA::kReturnValueIndex == 5);
4699 static_assert(PCA::kDataIndex == 6);
4700 static_assert(PCA::kThisIndex == 7);
4701 static_assert(PCA::kArgsLength == 8);
4702
4703 // Set up v8::PropertyCallbackInfo's (PCI) args_ on the stack as follows:
4704 // Target state:
4705 // sp[0 * kSystemPointerSize]: name <= PCI::args_
4706 // sp[1 * kSystemPointerSize]: kShouldThrowOnErrorIndex
4707 // sp[2 * kSystemPointerSize]: kHolderIndex
4708 // sp[3 * kSystemPointerSize]: kIsolateIndex
4709 // sp[4 * kSystemPointerSize]: kHolderV2Index
4710 // sp[5 * kSystemPointerSize]: kReturnValueIndex
4711 // sp[6 * kSystemPointerSize]: kDataIndex
4712 // sp[7 * kSystemPointerSize]: kThisIndex / receiver
4713
4714 Register name_arg = kCArgRegs[0];
4715 Register property_callback_info_arg = kCArgRegs[1];
4716
4717 Register api_function_address = r4;
4718 Register receiver = ApiGetterDescriptor::ReceiverRegister();
4719 Register holder = ApiGetterDescriptor::HolderRegister();
4720 Register callback = ApiGetterDescriptor::CallbackRegister();
4721 Register scratch = r6;
4722 Register smi_zero = r7;
4723
4724 DCHECK(!AreAliased(receiver, holder, callback, scratch, smi_zero));
4725
4726 __ LoadTaggedField(scratch,
4727 FieldMemOperand(callback, AccessorInfo::kDataOffset), r1);
4728 __ Push(receiver, scratch);
4729 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4730 __ Move(smi_zero, Smi::zero());
4731 __ Push(scratch, smi_zero); // kReturnValueIndex, kHolderV2Index
4732 __ Move(scratch, ER::isolate_address());
4733 __ Push(scratch, holder);
4734 __ LoadTaggedField(name_arg,
4735 FieldMemOperand(callback, AccessorInfo::kNameOffset), r1);
4736 static_assert(kDontThrow == 0);
4737 __ Push(smi_zero, name_arg); // should_throw_on_error -> kDontThrow, name
4738
4739 __ RecordComment("Load api_function_address");
4740 __ LoadU64(
4741 api_function_address,
4742 FieldMemOperand(callback, AccessorInfo::kMaybeRedirectedGetterOffset));
4743
4744 FrameScope frame_scope(masm, StackFrame::MANUAL);
4745 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4746 StackFrame::API_ACCESSOR_EXIT);
4747
4748 __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
4749 // property_callback_info_arg = v8::PropertyCallbackInfo&
4750 __ AddS64(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));
4751
4752 DCHECK(!AreAliased(api_function_address, property_callback_info_arg, name_arg,
4753 callback, scratch));
4754
4755#ifdef V8_ENABLE_DIRECT_HANDLE
4756 // name_arg = Local<Name>(name), name value was pushed to GC-ed stack space.
4757 // |name_arg| is already initialized above.
4758#else
4759 // name_arg = Local<Name>(&name), which is &args_array[kPropertyKeyIndex].
4760 static_assert(PCA::kPropertyKeyIndex == 0);
4761 __ mov(name_arg, property_callback_info_arg);
4762#endif
4763
4764 ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
4765 // Pass AccessorInfo to thunk wrapper in case profiler or side-effect
4766 // checking is enabled.
4767 Register thunk_arg = callback;
4768
4769 MemOperand return_value_operand = MemOperand(fp, FC::kReturnValueOffset);
4770 static constexpr int kSlotsToDropOnReturn =
4771 FC::kPropertyCallbackInfoArgsLength;
4772 MemOperand* const kUseStackSpaceConstant = nullptr;
4773
4774 const bool with_profiling = true;
4775 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4776 thunk_ref, thunk_arg, kSlotsToDropOnReturn,
4777 kUseStackSpaceConstant, return_value_operand);
4778}
4779
4780void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
4781 // Unused.
4782 __ stop();
4783}
4784
4785namespace {
4786
4787// This code tries to be close to ia32 code so that any changes can be
4788// easily ported.
4789void Generate_DeoptimizationEntry(MacroAssembler* masm,
4790 DeoptimizeKind deopt_kind) {
4791 Isolate* isolate = masm->isolate();
4792
4793 // Save all the registers onto the stack.
4794 static constexpr int kNumberOfRegisters = Register::kNumRegisters;
4795
4796 RegList restored_regs = kJSCallerSaved | kCalleeSaved;
4797
4798 const int kDoubleRegsSize = kDoubleSize * DoubleRegister::kNumRegisters;
4799
4800 // Save all double registers before messing with them.
4801 __ lay(sp, MemOperand(sp, -kDoubleRegsSize));
4802 const RegisterConfiguration* config = RegisterConfiguration::Default();
4803 for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4804 int code = config->GetAllocatableDoubleCode(i);
4805 const DoubleRegister dreg = DoubleRegister::from_code(code);
4806 int offset = code * kDoubleSize;
4807 __ StoreF64(dreg, MemOperand(sp, offset));
4808 }
4809
4810 // Push all GPRs onto the stack.
4811 __ lay(sp, MemOperand(sp, -(kNumberOfRegisters * kSystemPointerSize)));
4812 __ StoreMultipleP(r0, sp, MemOperand(sp)); // Save all 16 registers
4813
4814 __ Move(r1, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
4815 isolate));
4816 __ StoreU64(fp, MemOperand(r1));
4817
4818 static constexpr int kSavedRegistersAreaSize =
4819 (kNumberOfRegisters * kSystemPointerSize) + kDoubleRegsSize;
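 // Assuming the usual s390x register counts (16 GPRs and 16 double registers,
 // 8 bytes each), this area is 16 * 8 + 16 * 8 = 256 bytes.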
4820
4821 // Get the address of the location in the code object (r5)(return
4822 // address for lazy deoptimization) and compute the fp-to-sp delta in
4823 // register r6.
4824 __ mov(r4, r14);
4825 __ la(r5, MemOperand(sp, kSavedRegistersAreaSize));
4826 __ SubS64(r5, fp, r5);
4827
4828 // Allocate a new deoptimizer object.
4829 // Pass six arguments in r2 to r7.
4830 __ PrepareCallCFunction(5, r7);
4831 __ mov(r2, Operand::Zero());
4832 Label context_check;
4833 __ LoadU64(r3,
4834 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
4835 __ JumpIfSmi(r3, &context_check);
4836 __ LoadU64(r2, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
4837 __ bind(&context_check);
4838 __ mov(r3, Operand(static_cast<int>(deopt_kind)));
4839 // r4: code address or 0 already loaded.
4840 // r5: Fp-to-sp delta already loaded.
4841 // Parm6: isolate is passed on the stack.
4843 __ StoreU64(r6,
4845
4846 // Call Deoptimizer::New().
4847 {
4848 AllowExternalCallThatCantCauseGC scope(masm);
4849 __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
4850 }
4851
4852 // Preserve "deoptimizer" object in register r2 and get the input
4853 // frame descriptor pointer to r3 (deoptimizer->input_);
4854 __ LoadU64(r3, MemOperand(r2, Deoptimizer::input_offset()));
4855
4856 // Copy core registers into FrameDescription::registers_[kNumRegisters].
4857 // DCHECK_EQ(Register::kNumRegisters, kNumberOfRegisters);
4858 // __ mvc(MemOperand(r3, FrameDescription::registers_offset()),
4859 // MemOperand(sp), kNumberOfRegisters * kSystemPointerSize);
4860 // Copy core registers into FrameDescription::registers_[kNumRegisters].
4861 // TODO(john.yan): optimize the following code by using mvc instruction
4863 for (int i = 0; i < kNumberOfRegisters; i++) {
4864 int offset =
4865 (i * kSystemPointerSize) + FrameDescription::registers_offset();
4866 __ LoadU64(r4, MemOperand(sp, i * kSystemPointerSize));
4867 __ StoreU64(r4, MemOperand(r3, offset));
4868 }
4869
4870 int simd128_regs_offset = FrameDescription::simd128_registers_offset();
4871 // Copy double registers to
4872 // double_registers_[DoubleRegister::kNumRegisters]
4873 for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4874 int code = config->GetAllocatableDoubleCode(i);
4875 int dst_offset = code * kSimd128Size + simd128_regs_offset;
4876 int src_offset =
4877 code * kDoubleSize + kNumberOfRegisters * kSystemPointerSize;
4878 // TODO(joransiu): MVC opportunity
4879 __ LoadF64(d0, MemOperand(sp, src_offset));
4880 __ StoreF64(d0, MemOperand(r3, dst_offset));
4881 }
4882
4883 // Mark the stack as not iterable for the CPU profiler which won't be able to
4884 // walk the stack without the return address.
4885 {
4886 UseScratchRegisterScope temps(masm);
4887 Register is_iterable = temps.Acquire();
4888 Register zero = r6;
4889 __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
4890 __ lhi(zero, Operand(0));
4891 __ StoreU8(zero, MemOperand(is_iterable));
4892 }
4893
4894 // Remove the saved registers from the stack.
4895 __ la(sp, MemOperand(sp, kSavedRegistersAreaSize));
4896
4897 // Compute a pointer to the unwinding limit in register r4; that is
4898 // the first stack slot not part of the input frame.
4899 __ LoadU64(r4, MemOperand(r3, FrameDescription::frame_size_offset()));
4900 __ AddS64(r4, sp);
4901
4902 // Unwind the stack down to - but not including - the unwinding
4903 // limit and copy the contents of the activation frame to the input
4904 // frame description.
4905 __ la(r5, MemOperand(r3, FrameDescription::frame_content_offset()));
4906 Label pop_loop;
4907 Label pop_loop_header;
4908 __ b(&pop_loop_header, Label::kNear);
4909 __ bind(&pop_loop);
4910 __ pop(r6);
4911 __ StoreU64(r6, MemOperand(r5, 0));
4912 __ la(r5, MemOperand(r5, kSystemPointerSize));
4913 __ bind(&pop_loop_header);
4914 __ CmpS64(r4, sp);
4915 __ bne(&pop_loop);
4916
4917 // Compute the output frame in the deoptimizer.
4918 __ push(r2); // Preserve deoptimizer object across call.
4919 // r2: deoptimizer object; r3: scratch.
4920 __ PrepareCallCFunction(1, r3);
4921 // Call Deoptimizer::ComputeOutputFrames().
4922 {
4923 AllowExternalCallThatCantCauseGC scope(masm);
4924 __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
4925 }
4926 __ pop(r2); // Restore deoptimizer object (class Deoptimizer).
4927 __ pop(r2); // Restore deoptimizer object (class Deoptimizer).
4928 __ LoadU64(sp, MemOperand(r2, Deoptimizer::caller_frame_top_offset()));
4929
4930 // Replace the current (input) frame with the output frames.
4931 Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
4932 // Outer loop state: r6 = current "FrameDescription** output_",
4933 // r3 = one past the last FrameDescription**.
4934 __ LoadU64(r3, MemOperand(r2, Deoptimizer::output_count_offset()));
4935 __ LoadU64(r6,
4936 MemOperand(r2, Deoptimizer::output_offset())); // r6 is output_.
4937 __ ShiftLeftU64(r3, r3, Operand(kSystemPointerSizeLog2));
4938 __ AddS64(r3, r6, r3);
4939 __ b(&outer_loop_header, Label::kNear);
4940
4941 __ bind(&outer_push_loop);
4942 // Inner loop state: r4 = current FrameDescription*, r5 = loop index.
4943 __ LoadU64(r4, MemOperand(r6, 0)); // output_[ix]
4944 __ LoadU64(r5, MemOperand(r4, FrameDescription::frame_size_offset()));
4945 __ b(&inner_loop_header, Label::kNear);
4946
4947 __ bind(&inner_push_loop);
4948 __ SubS64(r5, Operand(sizeof(intptr_t)));
4949 __ AddS64(r8, r4, r5);
4950 __ LoadU64(r8, MemOperand(r8, FrameDescription::frame_content_offset()));
4951 __ push(r8);
4952
4953 __ bind(&inner_loop_header);
4954 __ CmpS64(r5, Operand::Zero());
4955 __ bne(&inner_push_loop); // test for gt?
4956
4957 __ AddS64(r6, r6, Operand(kSystemPointerSize));
4958 __ bind(&outer_loop_header);
4959 __ CmpS64(r6, r3);
4960 __ blt(&outer_push_loop);
4961
4962 __ LoadU64(r3, MemOperand(r2, Deoptimizer::input_offset()));
4963 for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4964 int code = config->GetAllocatableDoubleCode(i);
4965 const DoubleRegister dreg = DoubleRegister::from_code(code);
4966 int src_offset = code * kSimd128Size + simd128_regs_offset;
4967 __ ld(dreg, MemOperand(r3, src_offset));
4968 }
4969
4970 // Push pc and continuation from the last output frame.
4971 __ LoadU64(r8, MemOperand(r4, FrameDescription::pc_offset()));
4972 __ push(r8);
4973 __ LoadU64(r8, MemOperand(r4, FrameDescription::continuation_offset()));
4974 __ push(r8);
4975
4976 // Restore the registers from the last output frame.
4977 __ mov(r1, r4);
4978 for (int i = kNumberOfRegisters - 1; i > 0; i--) {
4979 int offset =
4980 (i * kSystemPointerSize) + FrameDescription::registers_offset();
4981 if ((restored_regs.bits() & (1 << i)) != 0) {
4982 __ LoadU64(ToRegister(i), MemOperand(r1, offset));
4983 }
4984 }
4985
4986 {
4987 UseScratchRegisterScope temps(masm);
4988 Register is_iterable = temps.Acquire();
4989 Register one = r6;
4990 __ push(one); // Save the value from the output FrameDescription.
4991 __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
4992 __ lhi(one, Operand(1));
4993 __ StoreU8(one, MemOperand(is_iterable));
4994 __ pop(one); // Restore the value from the output FrameDescription.
4995 }
4996
4997 {
4998 __ pop(ip); // get continuation, leave pc on stack
4999 __ pop(r14);
5000 Label end;
5001 __ CmpU64(ip, Operand::Zero());
5002 __ beq(&end);
5003 __ Jump(ip);
5004 __ bind(&end);
5005 __ Ret();
5006 }
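// ip holds the continuation from the last output frame: if it is non-zero,
// jump to that (builtin) continuation; otherwise return directly to the pc
// that was popped into r14 above.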
5007
5008 __ stop();
5009}
5010
5011} // namespace
5012
5013void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
5014 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
5015}
5016
5017void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
5018 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
5019}
5020
5021void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
5022 using D = OnStackReplacementDescriptor;
5023 static_assert(D::kParameterCount == 2);
5024 OnStackReplacement(masm, OsrSourceTier::kInterpreter,
5025 D::MaybeTargetCodeRegister(),
5026 D::ExpectedParameterCountRegister());
5027}
5028
5029void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
5030 using D = OnStackReplacementDescriptor;
5031 static_assert(D::kParameterCount == 2);
5032
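// Reload the context from the baseline frame's context slot, presumably
// because baseline code does not keep the context register up to date at the
// OSR check.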
5033 __ LoadU64(kContextRegister,
5034 MemOperand(fp, BaselineFrameConstants::kContextOffset));
5035 OnStackReplacement(masm, OsrSourceTier::kBaseline,
5036 D::MaybeTargetCodeRegister(),
5037 D::ExpectedParameterCountRegister());
5038}
5039
5040void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
5041 // Frame is being dropped:
5042 // - Look up current function on the frame.
5043 // - Leave the frame.
5044 // - Restart the frame by calling the function.
5045
5046 __ LoadU64(r3, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
5047 __ LoadU64(r2, MemOperand(fp, StandardFrameConstants::kArgCOffset));
5048 __ LeaveFrame(StackFrame::INTERPRETED);
5049
5050 // The arguments are already in the stack (including any necessary padding),
5051 // we should not try to massage the arguments again.
5052 __ mov(r4, Operand(kDontAdaptArgumentsSentinel));
5053 __ InvokeFunction(r3, r4, r2, InvokeType::kJump);
5054}
5055
5056#undef __
5057
5058} // namespace internal
5059} // namespace v8
5060
5061#endif // V8_TARGET_ARCH_S390X