v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
builtins-ppc.cc
1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_PPC64
6
12// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
15#include "src/debug/debug.h"
19#include "src/heap/heap-inl.h"
21#include "src/objects/cell.h"
22#include "src/objects/foreign.h"
25#include "src/objects/smi.h"
26#include "src/runtime/runtime.h"
27
28#if V8_ENABLE_WEBASSEMBLY
33#endif // V8_ENABLE_WEBASSEMBLY
34
35namespace v8 {
36namespace internal {
37
38#define __ ACCESS_MASM(masm)
39namespace {
40
41static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
42 Register scratch) {
43 DCHECK(!AreAliased(code, scratch));
44 // Verify that the code kind is baseline code via the CodeKind.
45 __ LoadU32(scratch, FieldMemOperand(code, Code::kFlagsOffset));
46 __ DecodeField<Code::KindField>(scratch);
47 __ CmpS64(scratch, Operand(static_cast<int>(CodeKind::BASELINE)), r0);
48 __ Assert(eq, AbortReason::kExpectedBaselineData);
49}
50
51static void CheckSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
52 Register data,
53 Register scratch,
54 Label* is_baseline,
55 Label* is_bytecode) {
56 DCHECK(!AreAliased(r0, scratch));
57
58#if V8_STATIC_ROOTS_BOOL
59 __ IsObjectTypeFast(data, scratch, CODE_TYPE, r0);
60#else
61 __ CompareObjectType(data, scratch, scratch, CODE_TYPE);
62#endif // V8_STATIC_ROOTS_BOOL
63 if (v8_flags.debug_code) {
64 Label not_baseline;
65 __ b(ne, &not_baseline);
66 AssertCodeIsBaseline(masm, data, scratch);
67 __ b(eq, is_baseline);
68 __ bind(&not_baseline);
69 } else {
70 __ b(eq, is_baseline);
71 }
72
73#if V8_STATIC_ROOTS_BOOL
74 // scratch already contains the compressed map.
75 __ CompareInstanceTypeWithUniqueCompressedMap(scratch, Register::no_reg(),
76 INTERPRETER_DATA_TYPE);
77#else
78 // scratch already contains the instance type.
79 __ CmpU64(scratch, Operand(INTERPRETER_DATA_TYPE), r0);
80#endif // V8_STATIC_ROOTS_BOOL
81 __ b(ne, is_bytecode);
82}
83
84static void GetSharedFunctionInfoBytecodeOrBaseline(
85 MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
86 Label* is_baseline, Label* is_unavailable) {
88 DCHECK(!AreAliased(bytecode, scratch1));
89 ASM_CODE_COMMENT(masm);
90 Label done;
91 Register data = bytecode;
92 __ LoadTrustedPointerField(
93 data,
94 FieldMemOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset),
95 kUnknownIndirectPointerTag, scratch1);
96
97 if (V8_JITLESS_BOOL) {
98 __ IsObjectType(data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
99 __ b(ne, &done);
100 } else {
101 CheckSharedFunctionInfoBytecodeOrBaseline(masm, data, scratch1, is_baseline,
102 &done);
103 }
104
105 __ LoadTrustedPointerField(
106 bytecode, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset),
107 kBytecodeArrayIndirectPointerTag, scratch1);
108
109 __ bind(&done);
110 __ IsObjectType(bytecode, scratch1, scratch1, BYTECODE_ARRAY_TYPE);
111 __ b(ne, is_unavailable);
112}
113
114void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
115 intptr_t offset) {
116 __ AddS64(ip, entry_address, Operand(offset), r0);
117 __ mtlr(ip);
118
119 // "return" to the OSR entry point of the function.
120 __ Ret();
121}
122
123void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi,
124 Register scratch) {
125 DCHECK(!AreAliased(sfi, scratch));
126 __ mov(scratch, Operand(0));
127 __ StoreU16(scratch, FieldMemOperand(sfi, SharedFunctionInfo::kAgeOffset),
128 no_reg);
129}
130
131void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
132 Register scratch1, Register scratch2) {
133 __ LoadTaggedField(
134 scratch1,
135 FieldMemOperand(js_function, JSFunction::kSharedFunctionInfoOffset),
136 scratch2);
137 ResetSharedFunctionInfoAge(masm, scratch1, scratch2);
138}
139
140void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
141 Register feedback_vector, Register scratch1,
142 Register scratch2) {
143 DCHECK(!AreAliased(feedback_vector, scratch1));
144 __ LoadU8(scratch1,
145 FieldMemOperand(feedback_vector, FeedbackVector::kOsrStateOffset),
146 scratch2);
147 __ andi(
148 scratch1, scratch1,
149 Operand(static_cast<uint8_t>(~FeedbackVector::OsrUrgencyBits::kMask)));
150 __ StoreU8(scratch1,
151 FieldMemOperand(feedback_vector, FeedbackVector::kOsrStateOffset),
152 scratch2);
153}
154
155} // namespace
156
157// If there is baseline code on the shared function info, converts an
158// interpreter frame into a baseline frame and continues execution in baseline
159// code. Otherwise execution continues with bytecode.
160void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
161 MacroAssembler* masm) {
162 Label start;
163 __ bind(&start);
164
165 // Get function from the frame.
166 Register closure = r4;
167 __ LoadU64(closure, MemOperand(fp, StandardFrameConstants::kFunctionOffset),
168 r0);
169
170 // Get the InstructionStream object from the shared function info.
171 Register code_obj = r9;
172 __ LoadTaggedField(
173 code_obj, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset),
174 r0);
175
176 ResetSharedFunctionInfoAge(masm, code_obj, r6);
177
178 __ LoadTrustedPointerField(
179 code_obj,
180 FieldMemOperand(code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset),
181 kUnknownIndirectPointerTag, r6);
182
183 // For OSR entry it is safe to assume we always have baseline code.
184 if (v8_flags.debug_code) {
185 __ IsObjectType(code_obj, r6, r6, CODE_TYPE);
186 __ Assert(eq, AbortReason::kExpectedBaselineData);
187 AssertCodeIsBaseline(masm, code_obj, r6);
188 }
189
190 // Load the feedback cell and vector.
191 Register feedback_cell = r5;
192 Register feedback_vector = ip;
193 __ LoadTaggedField(feedback_cell,
194 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset),
195 r0);
196 __ LoadTaggedField(feedback_vector,
197 FieldMemOperand(feedback_cell, FeedbackCell::kValueOffset),
198 r0);
199
200 Label install_baseline_code;
201 // Check if feedback vector is valid. If not, call prepare for baseline to
202 // allocate it.
203 __ IsObjectType(feedback_vector, r6, r6, FEEDBACK_VECTOR_TYPE);
204 __ b(ne, &install_baseline_code);
205
206 // Save BytecodeOffset from the stack frame.
207 __ LoadU64(kInterpreterBytecodeOffsetRegister,
208 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
209 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
210 // Replace bytecode offset with feedback cell.
211 static_assert(InterpreterFrameConstants::kBytecodeOffsetFromFp ==
212 BaselineFrameConstants::kFeedbackCellFromFp);
213 __ StoreU64(feedback_cell,
214 MemOperand(fp, BaselineFrameConstants::kFeedbackCellFromFp));
215 feedback_cell = no_reg;
216 // Update feedback vector cache.
217 static_assert(InterpreterFrameConstants::kFeedbackVectorFromFp ==
218 BaselineFrameConstants::kFeedbackVectorFromFp);
219 __ StoreU64(feedback_vector,
220 MemOperand(fp, InterpreterFrameConstants::kFeedbackVectorFromFp));
221 feedback_vector = no_reg;
222
223 // Compute baseline pc for bytecode offset.
224 Register get_baseline_pc = r6;
225 __ Move(get_baseline_pc,
226 ExternalReference::baseline_pc_for_next_executed_bytecode());
227
228 __ SubS64(kInterpreterBytecodeOffsetRegister,
229 kInterpreterBytecodeOffsetRegister,
230 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
231
232 // Get bytecode array from the stack frame.
233 __ LoadU64(kInterpreterBytecodeArrayRegister,
234 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
235 // Save the accumulator register, since it's clobbered by the below call.
236 __ Push(kInterpreterAccumulatorRegister);
237 __ Push(code_obj);
238 {
239 __ mr(kCArgRegs[0], code_obj);
240 __ mr(kCArgRegs[1], kInterpreterBytecodeOffsetRegister);
241 __ mr(kCArgRegs[2], kInterpreterBytecodeArrayRegister);
242 FrameScope scope(masm, StackFrame::INTERNAL);
243 __ PrepareCallCFunction(4, 0, ip);
244 __ CallCFunction(get_baseline_pc, 3, 0);
245 }
246 __ Pop(code_obj);
247 __ LoadCodeInstructionStart(code_obj, code_obj);
248 __ AddS64(code_obj, code_obj, kReturnRegister0);
249 __ Pop(kInterpreterAccumulatorRegister);
250
251 Generate_OSREntry(masm, code_obj, 0);
252 __ Trap(); // Unreachable.
253
254 __ bind(&install_baseline_code);
255 {
256 FrameScope scope(masm, StackFrame::INTERNAL);
257 __ Push(kInterpreterAccumulatorRegister);
258 __ Push(closure);
259 __ CallRuntime(Runtime::kInstallBaselineCode, 1);
260 __ Pop(kInterpreterAccumulatorRegister);
261 }
262 // Retry from the start after installing baseline code.
263 __ b(&start);
264}
265
266void Builtins::Generate_Adaptor(MacroAssembler* masm,
267 int formal_parameter_count, Address address) {
268 __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
269 __ TailCallBuiltin(
270 Builtins::AdaptorWithBuiltinExitFrame(formal_parameter_count));
271}
272
273namespace {
274
275enum class ArgumentsElementType {
276 kRaw, // Push arguments as they are.
277 kHandle // Dereference arguments before pushing.
278};
279
280void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
281 Register scratch,
282 ArgumentsElementType element_type) {
283 DCHECK(!AreAliased(array, argc, scratch));
284 Label loop, done;
285 __ subi(scratch, argc, Operand(kJSArgcReceiverSlots));
286 __ cmpi(scratch, Operand::Zero());
287 __ beq(&done);
288 __ mtctr(scratch);
289 __ ShiftLeftU64(scratch, scratch, Operand(kSystemPointerSizeLog2));
290 __ add(scratch, array, scratch);
291
292 __ bind(&loop);
293 __ LoadU64WithUpdate(ip, MemOperand(scratch, -kSystemPointerSize));
294 if (element_type == ArgumentsElementType::kHandle) {
295 __ LoadU64(ip, MemOperand(ip));
296 }
297 __ push(ip);
298 __ bdnz(&loop);
299 __ bind(&done);
300}
301
302void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
303 // ----------- S t a t e -------------
304 // -- r3 : number of arguments
305 // -- r4 : constructor function
306 // -- r6 : new target
307 // -- cp : context
308 // -- lr : return address
309 // -- sp[...]: constructor arguments
310 // -----------------------------------
311
312 Register scratch = r5;
313
314 Label stack_overflow;
315
316 __ StackOverflowCheck(r3, scratch, &stack_overflow);
317 // Enter a construct frame.
318 {
319 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
320
321 // Preserve the incoming parameters on the stack.
322
323 __ Push(cp, r3);
324
325 // TODO(victorgomes): When the arguments adaptor is completely removed, we
326 // should get the formal parameter count and copy the arguments in its
327 // correct position (including any undefined), instead of delaying this to
328 // InvokeFunction.
329
330 // Set up pointer to first argument (skip receiver).
331 __ addi(
332 r7, fp,
333 Operand(StandardFrameConstants::kCallerSPOffset + kSystemPointerSize));
334 // Copy arguments and receiver to the expression stack.
335 // r7: Pointer to start of arguments.
336 // r3: Number of arguments.
337 Generate_PushArguments(masm, r7, r3, r8, ArgumentsElementType::kRaw);
338
339 // The receiver for the builtin/api call.
340 __ PushRoot(RootIndex::kTheHoleValue);
341
342 // Call the function.
343 // r3: number of arguments (untagged)
344 // r4: constructor function
345 // r6: new target
346 {
347 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
348 __ InvokeFunctionWithNewTarget(r4, r6, r3, InvokeType::kCall);
349 }
350
351 // Restore context from the frame.
352 __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
353 // Restore arguments count from the frame.
354 __ LoadU64(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
355
356 // Leave construct frame.
357 }
358 // Remove caller arguments from the stack and return.
359 __ DropArguments(scratch);
360 __ blr();
361
362 __ bind(&stack_overflow);
363 {
364 FrameScope scope(masm, StackFrame::INTERNAL);
365 __ CallRuntime(Runtime::kThrowStackOverflow);
366 __ bkpt(0); // Unreachable code.
367 }
368}
369
370enum class OsrSourceTier {
371 kInterpreter,
372 kBaseline,
373};
374
375void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
376 Register maybe_target_code,
377 Register expected_param_count) {
378 Label jump_to_optimized_code;
379 {
380 // If maybe_target_code is not null, no need to call into runtime. A
381 // precondition here is: if maybe_target_code is an InstructionStream
382 // object, it must NOT be marked_for_deoptimization (callers must ensure
383 // this).
384 __ CmpSmiLiteral(maybe_target_code, Smi::zero(), r0);
385 __ bne(&jump_to_optimized_code);
386 }
387
388 ASM_CODE_COMMENT(masm);
389 {
390 FrameScope scope(masm, StackFrame::INTERNAL);
391 __ CallRuntime(Runtime::kCompileOptimizedOSR);
392 }
393
394 // If the code object is null, just return to the caller.
395 __ CmpSmiLiteral(r3, Smi::zero(), r0);
396 __ bne(&jump_to_optimized_code);
397 __ Ret();
398
399 __ bind(&jump_to_optimized_code);
400 DCHECK_EQ(maybe_target_code, r3); // Already in the right spot.
401
402 // OSR entry tracing.
403 {
404 Label next;
405 __ Move(r4, ExternalReference::address_of_log_or_trace_osr());
406 __ LoadU8(r4, MemOperand(r4));
407 __ andi(r0, r4, Operand(0xFF)); // Mask to the LSB.
408 __ beq(&next, cr0);
409
410 {
411 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
412 __ Push(r3); // Preserve the code object.
413 __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
414 __ Pop(r3);
415 }
416
417 __ bind(&next);
418 }
419
420 if (source == OsrSourceTier::kInterpreter) {
421 // Drop the handler frame that may be sitting on top of the actual
422 // JavaScript frame. This is the case when OSR is triggered from bytecode.
423 __ LeaveFrame(StackFrame::STUB);
424 }
425
426 // The sandbox would rely on testing expected_parameter_count here.
427 static_assert(!V8_ENABLE_SANDBOX_BOOL);
428
429 // Load deoptimization data from the code object.
430 // <deopt_data> = <code>[#deoptimization_data_offset]
431 __ LoadTaggedField(
432 r4, FieldMemOperand(r3, Code::kDeoptimizationDataOrInterpreterDataOffset),
433 r0);
434
435 {
436 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
437
438 if (V8_EMBEDDED_CONSTANT_POOL_BOOL) {
439 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3, r0, ip);
440 }
441
442 __ LoadCodeInstructionStart(r3, r3);
443
444 // Load the OSR entrypoint offset from the deoptimization data.
445 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
446 __ SmiUntag(r4,
447 FieldMemOperand(r4, FixedArray::OffsetOfElementAt(
448 DeoptimizationData::kOsrPcOffsetIndex)),
449 LeaveRC, r0);
450
451 // Compute the target address = code start + osr_offset
452 __ add(r0, r3, r4);
453
454 // And "return" to the OSR entry point of the function.
455 __ mtlr(r0);
456 __ blr();
457 }
458}
459
460} // namespace
461
462// The construct stub for ES5 constructor functions and ES6 class constructors.
463void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
464 // ----------- S t a t e -------------
465 // -- r3: number of arguments (untagged)
466 // -- r4: constructor function
467 // -- r6: new target
468 // -- cp: context
469 // -- lr: return address
470 // -- sp[...]: constructor arguments
471 // -----------------------------------
472
473 FrameScope scope(masm, StackFrame::MANUAL);
474 // Enter a construct frame.
475 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
476 __ EnterFrame(StackFrame::CONSTRUCT);
477
478 // Preserve the incoming parameters on the stack.
479 __ Push(cp, r3, r4);
480 __ PushRoot(RootIndex::kUndefinedValue);
481 __ Push(r6);
482
483 // ----------- S t a t e -------------
484 // -- sp[0*kSystemPointerSize]: new target
485 // -- sp[1*kSystemPointerSize]: padding
486 // -- r4 and sp[2*kSystemPointerSize]: constructor function
487 // -- sp[3*kSystemPointerSize]: number of arguments
488 // -- sp[4*kSystemPointerSize]: context
489 // -----------------------------------
490
491 __ LoadTaggedField(
492 r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset), r0);
493 __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
494 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r7);
495 __ JumpIfIsInRange(
496 r7, r0, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
497 static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
498 &not_create_implicit_receiver);
499
500 // If not derived class constructor: Allocate the new receiver object.
501 __ CallBuiltin(Builtin::kFastNewObject);
502 __ b(&post_instantiation_deopt_entry);
503
504 // Else: use TheHoleValue as receiver for constructor call
505 __ bind(&not_create_implicit_receiver);
506 __ LoadRoot(r3, RootIndex::kTheHoleValue);
507
508 // ----------- S t a t e -------------
509 // -- r3: receiver
510 // -- Slot 4 / sp[0*kSystemPointerSize]: new target
511 // -- Slot 3 / sp[1*kSystemPointerSize]: padding
512 // -- Slot 2 / sp[2*kSystemPointerSize]: constructor function
513 // -- Slot 1 / sp[3*kSystemPointerSize]: number of arguments
514 // -- Slot 0 / sp[4*kSystemPointerSize]: context
515 // -----------------------------------
516 // Deoptimizer enters here.
517 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
518 masm->pc_offset());
519 __ bind(&post_instantiation_deopt_entry);
520
521 // Restore new target.
522 __ Pop(r6);
523
524 // Push the allocated receiver to the stack.
525 __ Push(r3);
526 // We need two copies because we may have to return the original one
527 // and the calling conventions dictate that the called function pops the
528 // receiver. The second copy is saved in r9 and pushed after the arguments,
529 // since r3 needs to hold the number of arguments before
530 // InvokeFunction.
531 __ mr(r9, r3);
532
533 // Set up pointer to first argument (skip receiver).
534 __ addi(
535 r7, fp,
536 Operand(StandardFrameConstants::kCallerSPOffset + kSystemPointerSize));
537
538 // ----------- S t a t e -------------
539 // -- r6: new target
540 // -- sp[0*kSystemPointerSize]: implicit receiver
541 // -- sp[1*kSystemPointerSize]: implicit receiver
542 // -- sp[2*kSystemPointerSize]: padding
543 // -- sp[3*kSystemPointerSize]: constructor function
544 // -- sp[4*kSystemPointerSize]: number of arguments
545 // -- sp[5*kSystemPointerSize]: context
546 // -----------------------------------
547
548 // Restore constructor function and argument count.
549 __ LoadU64(r4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
550 __ LoadU64(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
551
552 Label stack_overflow;
553 __ StackOverflowCheck(r3, r8, &stack_overflow);
554
555 // Copy arguments to the expression stack.
556 // r7: Pointer to start of argument.
557 // r3: Number of arguments.
558 Generate_PushArguments(masm, r7, r3, r8, ArgumentsElementType::kRaw);
559
560 // Push implicit receiver.
561 __ Push(r9);
562
563 // Call the function.
564 {
565 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
566 __ InvokeFunctionWithNewTarget(r4, r6, r3, InvokeType::kCall);
567 }
568
569 // If the result is an object (in the ECMA sense), we should get rid
570 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
571 // on page 74.
572 Label use_receiver, do_throw, leave_and_return, check_receiver;
573
574 // If the result is undefined, we jump out to using the implicit receiver.
575 __ JumpIfNotRoot(r3, RootIndex::kUndefinedValue, &check_receiver);
576
577 // Otherwise we do a smi check and fall through to check if the return value
578 // is a valid receiver.
579
580 // Throw away the result of the constructor invocation and use the
581 // on-stack receiver as the result.
582 __ bind(&use_receiver);
583 __ LoadU64(r3, MemOperand(sp));
584 __ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);
585
586 __ bind(&leave_and_return);
587 // Restore arguments count from the frame.
588 __ LoadU64(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
589 // Leave construct frame.
590 __ LeaveFrame(StackFrame::CONSTRUCT);
591
592 // Remove caller arguments from the stack and return.
593 __ DropArguments(r4);
594 __ blr();
595
596 __ bind(&check_receiver);
597 // If the result is a smi, it is *not* an object in the ECMA sense.
598 __ JumpIfSmi(r3, &use_receiver);
599
600 // If the type of the result (stored in its map) is less than
601 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
602 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
603 __ CompareObjectType(r3, r7, r7, FIRST_JS_RECEIVER_TYPE);
604 __ bge(&leave_and_return);
605 __ b(&use_receiver);
606
607 __ bind(&do_throw);
608 // Restore the context from the frame.
609 __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
610 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
611 __ bkpt(0);
612
613 __ bind(&stack_overflow);
614 // Restore the context from the frame.
615 __ LoadU64(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
616 __ CallRuntime(Runtime::kThrowStackOverflow);
617 // Unreachable code.
618 __ bkpt(0);
619}
620
621void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
622 Generate_JSBuiltinsConstructStubHelper(masm);
623}
624
625// static
626void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
627 // ----------- S t a t e -------------
628 // -- r3 : the value to pass to the generator
629 // -- r4 : the JSGeneratorObject to resume
630 // -- lr : return address
631 // -----------------------------------
632 // Store input value into generator object.
633 __ StoreTaggedField(
634 r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset), r0);
635 __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
636 kLRHasNotBeenSaved, SaveFPRegsMode::kIgnore);
637 // Check that r4 is still valid, RecordWrite might have clobbered it.
638 __ AssertGeneratorObject(r4);
639
640 // Load suspended function and context.
641 __ LoadTaggedField(
642 r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset), r0);
643 __ LoadTaggedField(cp, FieldMemOperand(r7, JSFunction::kContextOffset), r0);
644
645 // Flood function if we are stepping.
646 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
647 Label stepping_prepared;
648 Register scratch = r8;
649 ExternalReference debug_hook =
650 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
651 __ Move(scratch, debug_hook);
652 __ LoadU8(scratch, MemOperand(scratch), r0);
653 __ extsb(scratch, scratch);
654 __ CmpSmiLiteral(scratch, Smi::zero(), r0);
655 __ bne(&prepare_step_in_if_stepping);
656
657 // Flood function if we need to continue stepping in the suspended generator.
658
659 ExternalReference debug_suspended_generator =
660 ExternalReference::debug_suspended_generator_address(masm->isolate());
661
662 __ Move(scratch, debug_suspended_generator);
663 __ LoadU64(scratch, MemOperand(scratch));
664 __ CmpS64(scratch, r4);
665 __ beq(&prepare_step_in_suspended_generator);
666 __ bind(&stepping_prepared);
667
668 // Check the stack for overflow. We are not trying to catch interruptions
669 // (i.e. debug break and preemption) here, so check the "real stack limit".
670 Label stack_overflow;
671 __ LoadStackLimit(scratch, StackLimitKind::kRealStackLimit, r0);
672 __ CmpU64(sp, scratch);
673 __ blt(&stack_overflow);
674
675 // ----------- S t a t e -------------
676 // -- r4 : the JSGeneratorObject to resume
677 // -- r7 : generator function
678 // -- cp : generator context
679 // -- lr : return address
680 // -----------------------------------
681
682 // Copy the function arguments from the generator object's register file.
683 __ LoadTaggedField(
684 r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset), r0);
685 __ LoadU16(
686 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
687 __ subi(r6, r6, Operand(kJSArgcReceiverSlots));
688 __ LoadTaggedField(
689 r5, FieldMemOperand(r4, JSGeneratorObject::kParametersAndRegistersOffset),
690 r0);
691 {
692 Label done_loop, loop;
693 __ bind(&loop);
694 __ subi(r6, r6, Operand(1));
695 __ cmpi(r6, Operand::Zero());
696 __ blt(&done_loop);
697 __ ShiftLeftU64(r10, r6, Operand(kTaggedSizeLog2));
698 __ add(scratch, r5, r10);
699 __ LoadTaggedField(
700 scratch, FieldMemOperand(scratch, OFFSET_OF_DATA_START(FixedArray)),
701 r0);
702 __ Push(scratch);
703 __ b(&loop);
704 __ bind(&done_loop);
705
706 // Push receiver.
707 __ LoadTaggedField(
708 scratch, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset), r0);
709 __ Push(scratch);
710 }
711
712 // Underlying function needs to have bytecode available.
713 if (v8_flags.debug_code) {
714 Label ok, is_baseline, is_unavailable;
715 Register sfi = r6;
716 Register bytecode = r6;
717 __ LoadTaggedField(
718 sfi, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset), r0);
719 GetSharedFunctionInfoBytecodeOrBaseline(masm, sfi, bytecode, ip,
720 &is_baseline, &is_unavailable);
721 __ b(&ok);
722
723 __ bind(&is_unavailable);
724 __ Abort(AbortReason::kMissingBytecodeArray);
725
726 __ bind(&is_baseline);
727 __ IsObjectType(bytecode, ip, ip, CODE_TYPE);
728 __ Assert(eq, AbortReason::kMissingBytecodeArray);
729
730 __ bind(&ok);
731 }
732
733 // Resume (Ignition/TurboFan) generator object.
734 {
735 __ LoadTaggedField(
736 r3, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset), r0);
737 __ LoadU16(r3, FieldMemOperand(
738 r3, SharedFunctionInfo::kFormalParameterCountOffset));
739 // We abuse new.target both to indicate that this is a resume call and to
740 // pass in the generator object. In ordinary calls, new.target is always
741 // undefined because generator functions are non-constructable.
742 __ mr(r6, r4);
743 __ mr(r4, r7);
744 __ JumpJSFunction(r4, r0);
745 }
746
747 __ bind(&prepare_step_in_if_stepping);
748 {
749 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
750 __ Push(r4, r7);
751 // Push hole as receiver since we do not use it for stepping.
752 __ PushRoot(RootIndex::kTheHoleValue);
753 __ CallRuntime(Runtime::kDebugOnFunctionCall);
754 __ Pop(r4);
755 __ LoadTaggedField(
756 r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset), r0);
757 }
758 __ b(&stepping_prepared);
759
760 __ bind(&prepare_step_in_suspended_generator);
761 {
762 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
763 __ Push(r4);
764 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
765 __ Pop(r4);
766 __ LoadTaggedField(
767 r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset), r0);
768 }
769 __ b(&stepping_prepared);
770
771 __ bind(&stack_overflow);
772 {
773 FrameScope scope(masm, StackFrame::INTERNAL);
774 __ CallRuntime(Runtime::kThrowStackOverflow);
775 __ bkpt(0); // This should be unreachable.
776 }
777}
778
779void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
780 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
781 __ push(r4);
782 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
783 __ Trap(); // Unreachable.
784}
785
786namespace {
787
788// Called with the native C calling convention. The corresponding function
789// signature is either:
790//
791// using JSEntryFunction = GeneratedCode<Address(
792// Address root_register_value, Address new_target, Address target,
793// Address receiver, intptr_t argc, Address** args)>;
794// or
795// using JSEntryFunction = GeneratedCode<Address(
796// Address root_register_value, MicrotaskQueue* microtask_queue)>;
797void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
798 Builtin entry_trampoline) {
799 // The register state is either:
800 // r3: root_register_value
801 // r4: code entry
802 // r5: function
803 // r6: receiver
804 // r7: argc
805 // r8: argv
806 // or
807 // r3: root_register_value
808 // r4: microtask_queue
809
810 Label invoke, handler_entry, exit;
811
812 {
813 NoRootArrayScope no_root_array(masm);
814
815 // PPC LINUX ABI:
816 // preserve LR in pre-reserved slot in caller's frame
817 __ mflr(r0);
818 __ StoreU64(r0, MemOperand(sp, kStackFrameLRSlot * kSystemPointerSize));
819
820 // Save callee saved registers on the stack.
821 __ MultiPush(kCalleeSaved);
822
823 // Save callee-saved double registers.
824 __ MultiPushDoubles(kCalleeSavedDoubles);
825 // Set up the reserved register for 0.0.
826 __ LoadDoubleLiteral(kDoubleRegZero, base::Double(0.0), r0);
827
828 // Initialize the root register.
829 // C calling convention. The first argument is passed in r3.
830 __ mr(kRootRegister, r3);
831
832#ifdef V8_COMPRESS_POINTERS
833 // Initialize the pointer cage base register.
834 __ LoadRootRelative(kPtrComprCageBaseRegister,
835 IsolateData::cage_base_offset());
836#endif
837 }
838
839 // Push a frame with special values setup to mark it as an entry frame.
840 // r4: code entry
841 // r5: function
842 // r6: receiver
843 // r7: argc
844 // r8: argv
845 // Clear c_entry_fp, now we've pushed its previous value to the stack.
846 // If the c_entry_fp is not already zero and we don't clear it, the
847 // StackFrameIteratorForProfiler will assume we are executing C++ and miss the
848 // JS frames on top.
849 __ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used.
850 __ push(r0);
851 if (V8_EMBEDDED_CONSTANT_POOL_BOOL) {
852 __ li(kConstantPoolRegister, Operand::Zero());
853 __ push(kConstantPoolRegister);
854 }
855 __ mov(r0, Operand(StackFrame::TypeToMarker(type)));
856 __ push(r0);
857 __ push(r0);
858
859 __ mov(r0, Operand::Zero());
860 __ Move(ip, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
861 masm->isolate()));
862 __ LoadU64(r3, MemOperand(ip));
863 __ StoreU64(r0, MemOperand(ip));
864 __ push(r3);
865
866 __ LoadIsolateField(ip, IsolateFieldId::kFastCCallCallerFP);
867 __ LoadU64(r3, MemOperand(ip));
868 __ StoreU64(r0, MemOperand(ip));
869 __ push(r3);
870
871 __ LoadIsolateField(ip, IsolateFieldId::kFastCCallCallerPC);
872 __ LoadU64(r3, MemOperand(ip));
873 __ StoreU64(r0, MemOperand(ip));
874 __ push(r3);
875
876 Register scratch = r9;
877 // Set up frame pointer for the frame to be pushed.
878 __ addi(fp, sp, Operand(-EntryFrameConstants::kNextExitFrameFPOffset));
879
880 // If this is the outermost JS call, set js_entry_sp value.
881 Label non_outermost_js;
882 ExternalReference js_entry_sp =
883 ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress,
884 masm->isolate());
885 __ Move(r3, js_entry_sp);
886 __ LoadU64(scratch, MemOperand(r3));
887 __ cmpi(scratch, Operand::Zero());
888 __ bne(&non_outermost_js);
889 __ StoreU64(fp, MemOperand(r3));
890 __ mov(scratch, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
891 Label cont;
892 __ b(&cont);
893 __ bind(&non_outermost_js);
894 __ mov(scratch, Operand(StackFrame::INNER_JSENTRY_FRAME));
895 __ bind(&cont);
896 __ push(scratch); // frame-type
897
898 // Jump to a faked try block that does the invoke, with a faked catch
899 // block that sets the exception.
900 __ b(&invoke);
901
902 // Block literal pool emission whilst taking the position of the handler
903 // entry. This avoids making the assumption that literal pools are always
904 // emitted after an instruction is emitted, rather than before.
905 {
906 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
907 __ bind(&handler_entry);
908
909 // Store the current pc as the handler offset. It's used later to create the
910 // handler table.
911 masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
912
913 // Caught exception: Store result (exception) in the exception
914 // field in the JSEnv and return a failure sentinel. Coming in here the
915 // fp will be invalid because the PushStackHandler below sets it to 0 to
916 // signal the existence of the JSEntry frame.
917 __ Move(scratch, ExternalReference::Create(
918 IsolateAddressId::kExceptionAddress, masm->isolate()));
919 }
920
921 __ StoreU64(r3, MemOperand(scratch));
922 __ LoadRoot(r3, RootIndex::kException);
923 __ b(&exit);
924
925 // Invoke: Link this frame into the handler chain.
926 __ bind(&invoke);
927 // Must preserve r4-r8.
928 __ PushStackHandler();
929 // If an exception not caught by another handler occurs, this handler
930 // returns control to the code after the b(&invoke) above, which
931 // restores all kCalleeSaved registers (including cp and fp) to their
932 // saved values before returning a failure to C.
933
934 // Invoke the function by calling through JS entry trampoline builtin.
935 // Notice that we cannot store a reference to the trampoline code directly in
936 // this stub, because runtime stubs are not traversed when doing GC.
937
938 // Invoke the function by calling through JS entry trampoline builtin and
939 // pop the faked function when we return.
940 __ CallBuiltin(entry_trampoline);
941
942 // Unlink this frame from the handler chain.
943 __ PopStackHandler();
944
945 __ bind(&exit); // r3 holds result
946 // Check if the current stack frame is marked as the outermost JS frame.
947 Label non_outermost_js_2;
948 __ pop(r8);
949 __ cmpi(r8, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
950 __ bne(&non_outermost_js_2);
951 __ mov(scratch, Operand::Zero());
952 __ Move(r8, js_entry_sp);
953 __ StoreU64(scratch, MemOperand(r8));
954 __ bind(&non_outermost_js_2);
955
956 // Restore the top frame descriptors from the stack.
957 __ pop(r6);
958 __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerPC);
959 __ StoreU64(r6, MemOperand(scratch));
960
961 __ pop(r6);
962 __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerFP);
963 __ StoreU64(r6, MemOperand(scratch));
964
965 __ pop(r6);
966 __ Move(scratch, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
967 masm->isolate()));
968 __ StoreU64(r6, MemOperand(scratch));
969
970 // Reset the stack to the callee saved registers.
971 __ addi(sp, sp, Operand(-EntryFrameConstants::kNextExitFrameFPOffset));
972
973 // Restore callee-saved double registers.
974 __ MultiPopDoubles(kCalleeSavedDoubles);
975
976 // Restore callee-saved registers.
977 __ MultiPop(kCalleeSaved);
978
979 // Return
980 __ LoadU64(r0, MemOperand(sp, kStackFrameLRSlot * kSystemPointerSize));
981 __ mtlr(r0);
982 __ blr();
983}
984
985} // namespace
986
987void Builtins::Generate_JSEntry(MacroAssembler* masm) {
988 Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
989}
990
991void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
992 Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
993 Builtin::kJSConstructEntryTrampoline);
994}
995
996void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
997 Generate_JSEntryVariant(masm, StackFrame::ENTRY,
998 Builtin::kRunMicrotasksTrampoline);
999}
1000
1001static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
1002 bool is_construct) {
1003 // Called from Generate_JS_Entry
1004 // r4: new.target
1005 // r5: function
1006 // r6: receiver
1007 // r7: argc
1008 // r8: argv
1009 // r0,r3,r9, cp may be clobbered
1010
1011 // Enter an internal frame.
1012 {
1013 FrameScope scope(masm, StackFrame::INTERNAL);
1014
1015 // Setup the context (we need to use the caller context from the isolate).
1016 ExternalReference context_address = ExternalReference::Create(
1017 IsolateAddressId::kContextAddress, masm->isolate());
1018 __ Move(cp, context_address);
1019 __ LoadU64(cp, MemOperand(cp));
1020
1021 // Push the function.
1022 __ Push(r5);
1023
1024 // Check if we have enough stack space to push all arguments.
1025 Label enough_stack_space, stack_overflow;
1026 __ mr(r3, r7);
1027 __ StackOverflowCheck(r3, r9, &stack_overflow);
1028 __ b(&enough_stack_space);
1029 __ bind(&stack_overflow);
1030 __ CallRuntime(Runtime::kThrowStackOverflow);
1031 // Unreachable code.
1032 __ bkpt(0);
1033
1034 __ bind(&enough_stack_space);
1035
1036 // Copy arguments to the stack.
1037 // r4: function
1038 // r7: argc
1039 // r8: argv, i.e. points to first arg
1040 Generate_PushArguments(masm, r8, r7, r9, ArgumentsElementType::kHandle);
1041
1042 // Push the receiver.
1043 __ Push(r6);
1044
1045 // r3: argc
1046 // r4: function
1047 // r6: new.target
1048 __ mr(r3, r7);
1049 __ mr(r6, r4);
1050 __ mr(r4, r5);
1051
1052 // Initialize all JavaScript callee-saved registers, since they will be seen
1053 // by the garbage collector as part of handlers.
1054 __ LoadRoot(r7, RootIndex::kUndefinedValue);
1055 __ mr(r8, r7);
1056 __ mr(r14, r7);
1057 __ mr(r15, r7);
1058 __ mr(r16, r7);
1059 __ mr(r17, r7);
1060
1061 // Invoke the code.
1062 Builtin builtin = is_construct ? Builtin::kConstruct : Builtins::Call();
1063 __ CallBuiltin(builtin);
1064
1065 // Exit the JS frame and remove the parameters (except function), and
1066 // return.
1067 }
1068 __ blr();
1069
1070 // r3: result
1071}
1072
1073void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1074 Generate_JSEntryTrampolineHelper(masm, false);
1075}
1076
1077void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1078 Generate_JSEntryTrampolineHelper(masm, true);
1079}
1080
1081void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
1082 // This expects two C++ function parameters passed by Invoke() in
1083 // execution.cc.
1084 // r3: root_register_value
1085 // r4: microtask_queue
1086
1087 __ mr(RunMicrotasksDescriptor::MicrotaskQueueRegister(), r4);
1088 __ TailCallBuiltin(Builtin::kRunMicrotasks);
1089}
1090
1091static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
1092 Register scratch2) {
1093 Register params_size = scratch1;
1094 // Get the size of the formal parameters + receiver (in bytes).
1095 __ LoadU64(params_size,
1096 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1097 __ LoadU16(params_size,
1098 FieldMemOperand(params_size, BytecodeArray::kParameterSizeOffset));
1099
1100 Register actual_params_size = scratch2;
1101 // Compute the size of the actual parameters + receiver (in bytes).
1102 __ LoadU64(actual_params_size,
1103 MemOperand(fp, StandardFrameConstants::kArgCOffset));
1104
1105 // If actual is bigger than formal, then we should use it to free up the stack
1106 // arguments.
1107 Label corrected_args_count;
1108 __ CmpS64(params_size, actual_params_size);
1109 __ bge(&corrected_args_count);
1110 __ mr(params_size, actual_params_size);
1111 __ bind(&corrected_args_count);
1112 // Leave the frame (also dropping the register file).
1113 __ LeaveFrame(StackFrame::INTERPRETED);
1114
1115 __ DropArguments(params_size);
1116}
1117
1118// Advance the current bytecode offset. This simulates what all bytecode
1119// handlers do upon completion of the underlying operation. Will bail out to a
1120// label if the bytecode (without prefix) is a return bytecode. Will not advance
1121// the bytecode offset if the current bytecode is a JumpLoop, instead just
1122// re-executing the JumpLoop to jump to the correct bytecode.
1123static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
1124 Register bytecode_array,
1125 Register bytecode_offset,
1126 Register bytecode, Register scratch1,
1127 Register scratch2, Label* if_return) {
1128 Register bytecode_size_table = scratch1;
1129 Register scratch3 = bytecode;
1130
1131 // The bytecode offset value will be increased by one in wide and extra wide
1132 // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
1133 // will restore the original bytecode. In order to simplify the code, we have
1134 // a backup of it.
1135 Register original_bytecode_offset = scratch2;
1136 DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
1137 bytecode, original_bytecode_offset));
1138 __ Move(bytecode_size_table,
1139 ExternalReference::bytecode_size_table_address());
1140 __ Move(original_bytecode_offset, bytecode_offset);
1141
1142 // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
1143 Label process_bytecode, extra_wide;
1144 static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
1145 static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
1146 static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
1147 static_assert(3 ==
1148 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
1149 __ cmpi(bytecode, Operand(0x3));
1150 __ bgt(&process_bytecode);
1151 __ andi(r0, bytecode, Operand(0x1));
1152 __ bne(&extra_wide, cr0);
1153
1154 // Load the next bytecode and update table to the wide scaled table.
1155 __ addi(bytecode_offset, bytecode_offset, Operand(1));
1156 __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
1157 __ addi(bytecode_size_table, bytecode_size_table,
1158 Operand(kByteSize * interpreter::Bytecodes::kBytecodeCount));
1159 __ b(&process_bytecode);
1160
1161 __ bind(&extra_wide);
1162 // Load the next bytecode and update table to the extra wide scaled table.
1163 __ addi(bytecode_offset, bytecode_offset, Operand(1));
1164 __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
1165 __ addi(bytecode_size_table, bytecode_size_table,
1166 Operand(2 * kByteSize * interpreter::Bytecodes::kBytecodeCount));
1167
1168 // Load the size of the current bytecode.
1169 __ bind(&process_bytecode);
1170
1171 // Bailout to the return label if this is a return bytecode.
1172#define JUMP_IF_EQUAL(NAME) \
1173 __ cmpi(bytecode, \
1174 Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
1175 __ beq(if_return);
1176 RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
1177#undef JUMP_IF_EQUAL
1178
1179 // If this is a JumpLoop, re-execute it to perform the jump to the beginning
1180 // of the loop.
1181 Label end, not_jump_loop;
1182 __ cmpi(bytecode,
1183 Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
1184 __ bne(&not_jump_loop);
1185 // We need to restore the original bytecode_offset since we might have
1186 // increased it to skip the wide / extra-wide prefix bytecode.
1187 __ Move(bytecode_offset, original_bytecode_offset);
1188 __ b(&end);
1189
1190 __ bind(&not_jump_loop);
1191 // Otherwise, load the size of the current bytecode and advance the offset.
1192 __ lbzx(scratch3, MemOperand(bytecode_size_table, bytecode));
1193 __ add(bytecode_offset, bytecode_offset, scratch3);
1194
1195 __ bind(&end);
1196}
1197
1198// static
1199void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
1200 auto descriptor =
1201 Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
1202 Register closure = descriptor.GetRegisterParameter(
1203 BaselineOutOfLinePrologueDescriptor::kClosure);
1204 // Load the feedback cell and vector from the closure.
1205 Register feedback_cell = r7;
1206 Register feedback_vector = ip;
1207 __ LoadTaggedField(feedback_cell,
1208 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset),
1209 r0);
1210 __ LoadTaggedField(feedback_vector,
1211 FieldMemOperand(feedback_cell, FeedbackCell::kValueOffset),
1212 r0);
1213 __ AssertFeedbackVector(feedback_vector, r11);
1214
1215#ifndef V8_ENABLE_LEAPTIERING
1216 // Check for a tiering state.
1217 Label flags_need_processing;
1218 Register flags = r10;
1219 {
1220 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1221 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1222 }
1223#endif // !V8_ENABLE_LEAPTIERING
1224
1225 { ResetFeedbackVectorOsrUrgency(masm, feedback_vector, r11, r0); }
1226
1227 // Increment invocation count for the function.
1228 {
1229 Register invocation_count = r11;
1230 __ LoadU32(invocation_count,
1231 FieldMemOperand(feedback_vector,
1232 FeedbackVector::kInvocationCountOffset),
1233 r0);
1234 __ AddS32(invocation_count, invocation_count, Operand(1));
1235 __ StoreU32(invocation_count,
1236 FieldMemOperand(feedback_vector,
1237 FeedbackVector::kInvocationCountOffset),
1238 r0);
1239 }
1240
1241 FrameScope frame_scope(masm, StackFrame::MANUAL);
1242 {
1243 ASM_CODE_COMMENT_STRING(masm, "Frame Setup");
1244 // Normally the first thing we'd do here is Push(lr, fp), but we already
1245 // entered the frame in BaselineCompiler::Prologue, as we had to use the
1246 // value lr before the call to this BaselineOutOfLinePrologue builtin.
1247
1248 Register callee_context = descriptor.GetRegisterParameter(
1249 BaselineOutOfLinePrologueDescriptor::kCalleeContext);
1250 Register callee_js_function = descriptor.GetRegisterParameter(
1251 BaselineOutOfLinePrologueDescriptor::kClosure);
1252 ResetJSFunctionAge(masm, callee_js_function, r11, r0);
1253 __ Push(callee_context, callee_js_function);
1254 DCHECK_EQ(callee_js_function, kJavaScriptCallTargetRegister);
1255 DCHECK_EQ(callee_js_function, kJSFunctionRegister);
1256
1257 Register argc = descriptor.GetRegisterParameter(
1258 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
1259 // We'll use the bytecode for both code age/OSR resetting, and pushing onto
1260 // the frame, so load it into a register.
1261 Register bytecodeArray = descriptor.GetRegisterParameter(
1262 BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
1263
1264 __ Push(argc, bytecodeArray);
1265
1266 if (v8_flags.debug_code) {
1267 Register scratch = r11;
1268 __ CompareObjectType(feedback_vector, scratch, scratch,
1269 FEEDBACK_VECTOR_TYPE);
1270 __ Assert(eq, AbortReason::kExpectedFeedbackVector);
1271 }
1272 __ Push(feedback_cell);
1273 __ Push(feedback_vector);
1274 }
1275
1276 Label call_stack_guard;
1277 Register frame_size = descriptor.GetRegisterParameter(
1278 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
1279 {
1280 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt check");
1281 // Stack check. This folds the checks for both the interrupt stack limit
1282 // check and the real stack limit into one by just checking for the
1283 // interrupt limit. The interrupt limit is either equal to the real stack
1284 // limit or tighter. By ensuring we have space until that limit after
1285 // building the frame we can quickly precheck both at once.
1286
1287 Register sp_minus_frame_size = r11;
1288 Register interrupt_limit = r0;
1289 __ SubS64(sp_minus_frame_size, sp, frame_size);
1290 __ LoadStackLimit(interrupt_limit, StackLimitKind::kInterruptStackLimit,
1291 r0);
1292 __ CmpU64(sp_minus_frame_size, interrupt_limit);
1293 __ blt(&call_stack_guard);
1294 }
1295
1296 // Do "fast" return to the caller pc in lr.
1297 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1298 __ Ret();
1299
1300#ifndef V8_ENABLE_LEAPTIERING
1301 __ bind(&flags_need_processing);
1302 {
1303 ASM_CODE_COMMENT_STRING(masm, "Optimized marker check");
1304
1305 // Drop the frame created by the baseline call.
1306 if (V8_EMBEDDED_CONSTANT_POOL_BOOL) {
1307 __ Pop(r0, fp, kConstantPoolRegister);
1308 } else {
1309 __ Pop(r0, fp);
1310 }
1311 __ mtlr(r0);
1312 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
1313 __ Trap();
1314 }
1315#endif // !V8_ENABLE_LEAPTIERING
1316
1317 __ bind(&call_stack_guard);
1318 {
1319 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt call");
1320 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1321 // Save incoming new target or generator
1322 __ Push(kJavaScriptCallNewTargetRegister);
1323 __ SmiTag(frame_size);
1324 __ Push(frame_size);
1325 __ CallRuntime(Runtime::kStackGuardWithGap);
1326 __ Pop(kJavaScriptCallNewTargetRegister);
1327 }
1328
1329 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1330 __ Ret();
1331}
1332
1333// static
1334void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
1335 // We're here because we got deopted during BaselineOutOfLinePrologue's stack
1336 // check. Undo all its frame creation and call into the interpreter instead.
1337
1338 // Drop the feedback vector, the bytecode offset (was the feedback vector but
1339 // got replaced during deopt) and bytecode array.
1340 __ Drop(3);
1341
1342 // Context, closure, argc.
1343 __ Pop(kContextRegister, kJavaScriptCallTargetRegister,
1344 kJavaScriptCallArgCountRegister);
1345
1346 // Drop frame pointer
1347 __ LeaveFrame(StackFrame::BASELINE);
1348
1349 // Enter the interpreter.
1350 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
1351}
1352
1353// Generate code for entering a JS function with the interpreter.
1354// On entry to the function the receiver and arguments have been pushed on the
1355// stack left to right.
1356//
1357// The live registers are:
1358// o r3: actual argument count
1359// o r4: the JS function object being called.
1360// o r6: the incoming new target or generator object
1361// o cp: our context
1362// o pp: the caller's constant pool pointer (if enabled)
1363// o fp: the caller's frame pointer
1364// o sp: stack pointer
1365// o lr: return address
1366//
1367// The function builds an interpreter frame. See InterpreterFrameConstants in
1368// frame-constants.h for its layout.
1369void Builtins::Generate_InterpreterEntryTrampoline(
1370 MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
1371 Register closure = r4;
1372
1373 // Get the bytecode array from the function object and load it into
1374 // kInterpreterBytecodeArrayRegister.
1375 Register sfi = r7;
1376 __ LoadTaggedField(
1377 sfi, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset), r0);
1378 ResetSharedFunctionInfoAge(masm, sfi, ip);
1379
1380 // The bytecode array could have been flushed from the shared function info,
1381 // if so, call into CompileLazy.
1382 Label is_baseline, compile_lazy;
1383 GetSharedFunctionInfoBytecodeOrBaseline(masm, sfi,
1384 kInterpreterBytecodeArrayRegister, ip,
1385 &is_baseline, &compile_lazy);
1386
1387 Label push_stack_frame;
1388 Register feedback_vector = r5;
1389 __ LoadFeedbackVector(feedback_vector, closure, r7, &push_stack_frame);
1390
1391#ifndef V8_JITLESS
1392#ifndef V8_ENABLE_LEAPTIERING
1393 // If feedback vector is valid, check for optimized code and update invocation
1394 // count.
1395
1396 Register flags = r7;
1397 Label flags_need_processing;
1398 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1399 flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
1400 &flags_need_processing);
1401#endif // !V8_ENABLE_LEAPTIERING
1402
1403 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, ip, r0);
1404
1405 // Increment invocation count for the function.
1406 __ LoadU32(
1407 r8,
1408 FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
1409 r0);
1410 __ addi(r8, r8, Operand(1));
1411 __ StoreU32(
1412 r8,
1413 FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
1414 r0);
1415
1416 // Open a frame scope to indicate that there is a frame on the stack. The
1417 // MANUAL indicates that the scope shouldn't actually generate code to set up
1418 // the frame (that is done below).
1419
1420#else
1421 // Note: By omitting the above code in jitless mode we also disable:
1422 // - kFlagsLogNextExecution: only used for logging/profiling; and
1423 // - kInvocationCountOffset: only used for tiering heuristics and code
1424 // coverage.
1425#endif // !V8_JITLESS
1426
1427 __ bind(&push_stack_frame);
1428 FrameScope frame_scope(masm, StackFrame::MANUAL);
1429 __ PushStandardFrame(closure);
1430
1431 // Load initial bytecode offset.
1432 __ mov(kInterpreterBytecodeOffsetRegister,
1433 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1434
1435 // Push bytecode array and Smi tagged bytecode array offset.
1436 __ SmiTag(r7, kInterpreterBytecodeOffsetRegister);
1437 __ Push(kInterpreterBytecodeArrayRegister, r7, feedback_vector);
1438
1439 // Allocate the local and temporary register file on the stack.
1440 Label stack_overflow;
1441 {
1442 // Load frame size (word) from the BytecodeArray object.
1443 __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1444 BytecodeArray::kFrameSizeOffset));
1445
1446 // Do a stack check to ensure we don't go over the limit.
1447 __ sub(r8, sp, r5);
1448 __ LoadStackLimit(ip, StackLimitKind::kRealStackLimit, r0);
1449 __ CmpU64(r8, ip);
1450 __ blt(&stack_overflow);
1451
1452 // If ok, push undefined as the initial value for all register file entries.
1453 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1454 Label loop, no_args;
1455 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1456 __ ShiftRightU64(r5, r5, Operand(kSystemPointerSizeLog2), SetRC);
1457 __ beq(&no_args, cr0);
1458 __ mtctr(r5);
1459 __ bind(&loop);
1460 __ push(kInterpreterAccumulatorRegister);
1461 __ bdnz(&loop);
1462 __ bind(&no_args);
1463 }
1464
1465 // If the bytecode array has a valid incoming new target or generator object
1466 // register, initialize it with incoming value which was passed in r6.
1467 Label no_incoming_new_target_or_generator_register;
1468 __ LoadS32(r8,
1469 FieldMemOperand(
1470 kInterpreterBytecodeArrayRegister,
1471 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset),
1472 r0);
1473 __ cmpi(r8, Operand::Zero());
1474 __ beq(&no_incoming_new_target_or_generator_register);
1475 __ ShiftLeftU64(r8, r8, Operand(kSystemPointerSizeLog2));
1476 __ StoreU64(r6, MemOperand(fp, r8));
1477 __ bind(&no_incoming_new_target_or_generator_register);
1478
1479 // Perform interrupt stack check.
1480 // TODO(solanes): Merge with the real stack limit check above.
1481 Label stack_check_interrupt, after_stack_check_interrupt;
1482 __ LoadStackLimit(ip, StackLimitKind::kInterruptStackLimit, r0);
1483 __ CmpU64(sp, ip);
1484 __ blt(&stack_check_interrupt);
1485 __ bind(&after_stack_check_interrupt);
1486
1487 // The accumulator is already loaded with undefined.
1488
1489 // Load the dispatch table into a register and dispatch to the bytecode
1490 // handler at the current bytecode offset.
1491 Label do_dispatch;
1492 __ bind(&do_dispatch);
1493 __ Move(
1494 kInterpreterDispatchTableRegister,
1495 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1496 __ lbzx(r6, MemOperand(kInterpreterBytecodeArrayRegister,
1497 kInterpreterBytecodeOffsetRegister));
1498 __ ShiftLeftU64(r6, r6, Operand(kSystemPointerSizeLog2));
1499 __ LoadU64(kJavaScriptCallCodeStartRegister,
1500 MemOperand(kInterpreterDispatchTableRegister, r6));
1501 __ Call(kJavaScriptCallCodeStartRegister);
1502
1503 __ RecordComment("--- InterpreterEntryReturnPC point ---");
1504 if (mode == InterpreterEntryTrampolineMode::kDefault) {
1505 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
1506 masm->pc_offset());
1507 } else {
1508 DCHECK_EQ(mode, InterpreterEntryTrampolineMode::kForProfiling);
1509 // Both versions must be the same up to this point otherwise the builtins
1510 // will not be interchangeable.
1511 CHECK_EQ(
1512 masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
1513 masm->pc_offset());
1514 }
1515
1516 // Any returns to the entry trampoline are either due to the return bytecode
1517 // or the interpreter tail calling a builtin and then a dispatch.
1518
1519 // Get bytecode array and bytecode offset from the stack frame.
1520 __ LoadU64(kInterpreterBytecodeArrayRegister,
1521 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1522 __ LoadU64(kInterpreterBytecodeOffsetRegister,
1523 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1524 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1525
1526 // Either return, or advance to the next bytecode and dispatch.
1527 Label do_return;
1528 __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
1529 kInterpreterBytecodeOffsetRegister));
1530 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1531 kInterpreterBytecodeOffsetRegister, r4, r5, r6,
1532 &do_return);
1533 __ b(&do_dispatch);
1534
1535 __ bind(&do_return);
1536 // The return value is in r3.
1537 LeaveInterpreterFrame(masm, r5, r7);
1538 __ blr();
1539
1540 __ bind(&stack_check_interrupt);
1541 // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
1542 // for the call to the StackGuard.
1543 __ mov(r0,
1544 Operand(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
1545 kFunctionEntryBytecodeOffset)));
1546 __ StoreU64(r0,
1547 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1548 __ CallRuntime(Runtime::kStackGuard);
1549
1550 // After the call, restore the bytecode array, bytecode offset and accumulator
1551 // registers again. Also, restore the bytecode offset in the stack to its
1552 // previous value.
1553 __ LoadU64(kInterpreterBytecodeArrayRegister,
1554 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1555 __ mov(kInterpreterBytecodeOffsetRegister,
1556 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1557 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1558
1559 __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
1560 __ StoreU64(r0,
1561 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1562
1563 __ jmp(&after_stack_check_interrupt);
1564
1565#ifndef V8_JITLESS
1566#ifndef V8_ENABLE_LEAPTIERING
1567 __ bind(&flags_need_processing);
1568 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
1569#endif // !V8_ENABLE_LEAPTIERING
1570
1571 __ bind(&is_baseline);
1572 {
1573#ifndef V8_ENABLE_LEAPTIERING
1574 // Load the feedback vector from the closure.
1575 __ LoadTaggedField(
1576 feedback_vector,
1577 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset), r0);
1578 __ LoadTaggedField(
1579 feedback_vector,
1580 FieldMemOperand(feedback_vector, FeedbackCell::kValueOffset), r0);
1581
1582 Label install_baseline_code;
1583 // Check if feedback vector is valid. If not, call prepare for baseline to
1584 // allocate it.
1585 __ LoadTaggedField(
1586 ip, FieldMemOperand(feedback_vector, HeapObject::kMapOffset), r0);
1587 __ LoadU16(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
1588 __ CmpS32(ip, Operand(FEEDBACK_VECTOR_TYPE), r0);
1589 __ b(ne, &install_baseline_code);
1590
1591 // Check for a tiering state.
1592 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1593 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1594
1595 // Load the baseline code into the closure.
1596 __ mr(r5, kInterpreterBytecodeArrayRegister);
1597 static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
1598 __ ReplaceClosureCodeWithOptimizedCode(r5, closure, ip, r7);
1599 __ JumpCodeObject(r5);
1600
1601 __ bind(&install_baseline_code);
1602#endif // !V8_ENABLE_LEAPTIERING
1603 __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
1604 }
1605#endif // !V8_JITLESS
1606
1607 __ bind(&compile_lazy);
1608 __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
1609
1610 __ bind(&stack_overflow);
1611 __ CallRuntime(Runtime::kThrowStackOverflow);
1612 __ bkpt(0); // Should not return.
1613}
1614
1615static void GenerateInterpreterPushArgs(MacroAssembler* masm, Register num_args,
1616 Register start_address,
1617 Register scratch) {
1618 ASM_CODE_COMMENT(masm);
1619 __ subi(scratch, num_args, Operand(1));
1620 __ ShiftLeftU64(scratch, scratch, Operand(kSystemPointerSizeLog2));
1621 __ sub(start_address, start_address, scratch);
1622 // Push the arguments.
1623 __ PushArray(start_address, num_args, scratch, r0,
1624 MacroAssembler::PushArrayOrder::kReverse);
1625}
1626
1627// static
1628void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1629 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1630 InterpreterPushArgsMode mode) {
1631 DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1632 // ----------- S t a t e -------------
1633 // -- r3 : the number of arguments
1634 // -- r5 : the address of the first argument to be pushed. Subsequent
1635 // arguments should be consecutive above this, in the same order as
1636 // they are to be pushed onto the stack.
1637 // -- r4 : the target to call (can be any Object).
1638 // -----------------------------------
1639 Label stack_overflow;
1640
1641 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1642 // The spread argument should not be pushed.
1643 __ subi(r3, r3, Operand(1));
1644 }
1645
1646 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1647 __ subi(r6, r3, Operand(kJSArgcReceiverSlots));
1648 } else {
1649 __ mr(r6, r3);
1650 }
1651
1652 __ StackOverflowCheck(r6, ip, &stack_overflow);
1653
1654 // Push the arguments.
1655 GenerateInterpreterPushArgs(masm, r6, r5, r7);
1656
1657 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1658 __ PushRoot(RootIndex::kUndefinedValue);
1659 }
1660
1661 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1662 // Pass the spread in the register r5.
1663 // r5 already points to the penultimate argument; the spread
1664 // lies in the next interpreter register.
1665 __ LoadU64(r5, MemOperand(r5, -kSystemPointerSize));
1666 }
1667
1668 // Call the target.
1669 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1670 __ TailCallBuiltin(Builtin::kCallWithSpread);
1671 } else {
1672 __ TailCallBuiltin(Builtins::Call(receiver_mode));
1673 }
1674
1675 __ bind(&stack_overflow);
1676 {
1677 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1678 // Unreachable Code.
1679 __ bkpt(0);
1680 }
1681}
1682
1683// static
1684void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1685 MacroAssembler* masm, InterpreterPushArgsMode mode) {
1686 // ----------- S t a t e -------------
1687 // -- r3 : argument count
1688 // -- r6 : new target
1689 // -- r4 : constructor to call
1690 // -- r5 : allocation site feedback if available, undefined otherwise.
1691 // -- r7 : address of the first argument
1692 // -----------------------------------
1693 Label stack_overflow;
1694 __ StackOverflowCheck(r3, ip, &stack_overflow);
1695
1696 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1697 // The spread argument should not be pushed.
1698 __ subi(r3, r3, Operand(1));
1699 }
1700
1701 Register argc_without_receiver = ip;
1702 __ subi(argc_without_receiver, r3, Operand(kJSArgcReceiverSlots));
1703
1704 // Push the arguments.
1705 GenerateInterpreterPushArgs(masm, argc_without_receiver, r7, r8);
1706
1707 // Push a slot for the receiver to be constructed.
1708 __ li(r0, Operand::Zero());
1709 __ push(r0);
1710
1711 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1712 // Pass the spread in the register r5.
1713 // r7 already points to the penultimate argument; the spread
1714 // lies in the next interpreter register.
1715 __ subi(r7, r7, Operand(kSystemPointerSize));
1716 __ LoadU64(r5, MemOperand(r7));
1717 } else {
1718 __ AssertUndefinedOrAllocationSite(r5, r8);
1719 }
1720
1721 if (mode == InterpreterPushArgsMode::kArrayFunction) {
1722 __ AssertFunction(r4);
1723
1724 // Tail call to the array construct stub (still in the caller
1725 // context at this point).
1726 __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
1727 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1728 // Call the constructor with r3, r4, and r6 unmodified.
1729 __ TailCallBuiltin(Builtin::kConstructWithSpread);
1730  } else {
1731    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1732    // Call the constructor with r3, r4, and r6 unmodified.
1733 __ TailCallBuiltin(Builtin::kConstruct);
1734 }
1735
1736 __ bind(&stack_overflow);
1737 {
1738 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1739 // Unreachable Code.
1740 __ bkpt(0);
1741 }
1742}
1743
1744// static
1745void Builtins::Generate_ConstructForwardAllArgsImpl(
1746    MacroAssembler* masm, ForwardWhichFrame which_frame) {
1747 // ----------- S t a t e -------------
1748 // -- r6 : new target
1749 // -- r4 : constructor to call
1750 // -----------------------------------
1751 Label stack_overflow;
1752
1753 // Load the frame pointer into r7.
1754  switch (which_frame) {
1755    case ForwardWhichFrame::kCurrentFrame:
1756      __ Move(r7, fp);
1757      break;
1758    case ForwardWhichFrame::kParentFrame:
1759      __ LoadU64(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset),
1760                 r0);
1761      break;
1762  }
1763
1764  // Load the argument count into r3.
1765  __ LoadU64(r3, MemOperand(r7, StandardFrameConstants::kArgCOffset));
1766  __ StackOverflowCheck(r3, ip, &stack_overflow);
1767
1768 // Point r7 to the base of the argument list to forward, excluding the
1769 // receiver.
1770  __ addi(r7, r7,
1771          Operand(CommonFrameConstants::kFixedFrameSizeAboveFp +
1772                  kSystemPointerSize));
1773
1774 // Copy arguments on the stack. r8 is a scratch register.
1775 Register argc_without_receiver = ip;
1776 __ subi(argc_without_receiver, r3, Operand(kJSArgcReceiverSlots));
1777 __ PushArray(r7, argc_without_receiver, r8, r0);
1778
1779 // Push a slot for the receiver to be constructed.
1780 __ li(r0, Operand::Zero());
1781 __ push(r0);
1782
1783 // Call the constructor with r3, r4, and r6 unmodified.
1784 __ TailCallBuiltin(Builtin::kConstruct);
1785
1786 __ bind(&stack_overflow);
1787 {
1788 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1789 // Unreachable Code.
1790 __ bkpt(0);
1791 }
1792}
1793
1794namespace {
1795
1796void NewImplicitReceiver(MacroAssembler* masm) {
1797 // ----------- S t a t e -------------
1798 // -- r3 : argument count
1799 // -- r4 : constructor to call (checked to be a JSFunction)
1800 // -- r6 : new target
1801 //
1802 // Stack:
1803 // -- Implicit Receiver
1804 // -- [arguments without receiver]
1805 // -- Implicit Receiver
1806 // -- Context
1807 // -- FastConstructMarker
1808 // -- FramePointer
1809 // -----------------------------------
1810 Register implicit_receiver = r7;
1811
1812 // Save live registers.
1813 __ SmiTag(r3);
1814 __ Push(r3, r4, r6);
1815 __ CallBuiltin(Builtin::kFastNewObject);
1816 // Save result.
1817 __ Move(implicit_receiver, r3);
1818 // Restore live registers.
1819 __ Pop(r3, r4, r6);
1820 __ SmiUntag(r3);
1821
1822 // Patch implicit receiver (in arguments)
1823 __ StoreU64(implicit_receiver, MemOperand(sp, 0 * kSystemPointerSize), r0);
1824 // Patch second implicit (in construct frame)
1825 __ StoreU64(
1826      implicit_receiver,
1827      MemOperand(fp, FastConstructFrameConstants::kImplicitReceiverOffset), r0);
1828
1829  // Restore context.
1830  __ LoadU64(cp, MemOperand(fp, FastConstructFrameConstants::kContextOffset),
1831             r0);
1832}
1833
1834} // namespace
1835
1836// static
1837void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
1838 MacroAssembler* masm) {
1839 // ----------- S t a t e -------------
1840 // -- r3 : argument count
1841 // -- r4 : constructor to call (checked to be a JSFunction)
1842 // -- r6 : new target
1843 // -- r7 : address of the first argument
1844 // -- cp/r30 : context pointer
1845 // -----------------------------------
1846 __ AssertFunction(r4);
1847
1848 // Check if target has a [[Construct]] internal method.
1849 Label non_constructor;
1850 __ LoadMap(r5, r4);
1851 __ lbz(r5, FieldMemOperand(r5, Map::kBitFieldOffset));
1852 __ TestBit(r5, Map::Bits1::IsConstructorBit::kShift, r0);
1853 __ beq(&non_constructor, cr0);
1854
1855 // Add a stack check before pushing arguments.
1856 Label stack_overflow;
1857 __ StackOverflowCheck(r3, r5, &stack_overflow);
1858
1859 // Enter a construct frame.
1860 FrameScope scope(masm, StackFrame::MANUAL);
1861 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
1862 // Implicit receiver stored in the construct frame.
1863 __ LoadRoot(r5, RootIndex::kTheHoleValue);
1864 __ Push(cp, r5);
1865
1866 // Push arguments + implicit receiver.
1867 Register argc_without_receiver = r9;
1868 __ SubS64(argc_without_receiver, r3, Operand(kJSArgcReceiverSlots));
1869 // Push the arguments. r7 and r8 will be modified.
1870 GenerateInterpreterPushArgs(masm, argc_without_receiver, r7, r8);
1871 // Implicit receiver as part of the arguments (patched later if needed).
1872 __ push(r5);
1873
1874 // Check if it is a builtin call.
1875 Label builtin_call;
1876 __ LoadTaggedField(
1877 r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset), r0);
1878 __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kFlagsOffset));
1879 __ mov(ip, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1880 __ and_(r0, r5, ip, SetRC);
1881 __ bne(&builtin_call, cr0);
1882
1883 // Check if we need to create an implicit receiver.
1884 Label not_create_implicit_receiver;
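  // Derived-class constructors (FunctionKind::kDefaultDerivedConstructor up to
  // kDerivedConstructor) do not get an implicit receiver allocated here: the
  // hole pushed above is kept and `this` only becomes available via super().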
1885 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r5);
1886 __ JumpIfIsInRange(
1887 r5, r0, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
1888 static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
1889 &not_create_implicit_receiver);
1890 NewImplicitReceiver(masm);
1891 __ bind(&not_create_implicit_receiver);
1892
1893 // Call the function.
1894 __ InvokeFunctionWithNewTarget(r4, r6, r3, InvokeType::kCall);
1895
1896 // ----------- S t a t e -------------
1897 // -- r0 constructor result
1898 //
1899 // Stack:
1900 // -- Implicit Receiver
1901 // -- Context
1902 // -- FastConstructMarker
1903 // -- FramePointer
1904 // -----------------------------------
1905
1906 // Store offset of return address for deoptimizer.
1907 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
1908 masm->pc_offset());
1909
1910 // If the result is an object (in the ECMA sense), we should get rid
1911 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
1912 // on page 74.
1913 Label use_receiver, do_throw, leave_and_return, check_receiver;
1914
1915 // If the result is undefined, we jump out to using the implicit receiver.
1916 __ JumpIfNotRoot(r3, RootIndex::kUndefinedValue, &check_receiver);
1917
1918 // Otherwise we do a smi check and fall through to check if the return value
1919 // is a valid receiver.
1920
1921 // Throw away the result of the constructor invocation and use the
1922 // on-stack receiver as the result.
1923 __ bind(&use_receiver);
1924  __ LoadU64(
1925      r3, MemOperand(fp, FastConstructFrameConstants::kImplicitReceiverOffset),
1926      r0);
1927 __ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);
1928
1929 __ bind(&leave_and_return);
1930 // Leave construct frame.
1931 __ LeaveFrame(StackFrame::CONSTRUCT);
1932 __ blr();
1933
1934 __ bind(&check_receiver);
1935 // If the result is a smi, it is *not* an object in the ECMA sense.
1936 __ JumpIfSmi(r3, &use_receiver);
1937
1938 // If the type of the result (stored in its map) is less than
1939 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
1940 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1941 __ CompareObjectType(r3, r7, r8, FIRST_JS_RECEIVER_TYPE);
1942 __ bge(&leave_and_return);
1943 __ b(&use_receiver);
1944
1945 __ bind(&builtin_call);
1946 // TODO(victorgomes): Check the possibility to turn this into a tailcall.
1947 __ InvokeFunctionWithNewTarget(r4, r6, r3, InvokeType::kCall);
1948 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1949 __ blr();
1950
1951 __ bind(&do_throw);
1952  // Restore the context from the frame.
1953  __ LoadU64(cp, MemOperand(fp, FastConstructFrameConstants::kContextOffset),
1954             r0);
1955 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
1956 __ bkpt(0);
1957
1958 __ bind(&stack_overflow);
1959 // Restore the context from the frame.
1960 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1961 // Unreachable code.
1962 __ bkpt(0);
1963
1964 // Called Construct on an Object that doesn't have a [[Construct]] internal
1965 // method.
1966 __ bind(&non_constructor);
1967 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
1968}
1969
1970static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1971 // Set the return address to the correct point in the interpreter entry
1972 // trampoline.
1973 Label builtin_trampoline, trampoline_loaded;
1974 Tagged<Smi> interpreter_entry_return_pc_offset(
1975 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1976 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1977
1978 // If the SFI function_data is an InterpreterData, the function will have a
1979 // custom copy of the interpreter entry trampoline for profiling. If so,
1980 // get the custom trampoline, otherwise grab the entry address of the global
1981  // trampoline.
1982  __ LoadU64(r5, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1983  __ LoadTaggedField(
1984 r5, FieldMemOperand(r5, JSFunction::kSharedFunctionInfoOffset), r0);
1985 __ LoadTrustedPointerField(
1986      r5, FieldMemOperand(r5, SharedFunctionInfo::kTrustedFunctionDataOffset),
1987      kUnknownIndirectPointerTag, r6);
1988  __ IsObjectType(r5, kInterpreterDispatchTableRegister,
1989 kInterpreterDispatchTableRegister, INTERPRETER_DATA_TYPE);
1990 __ bne(&builtin_trampoline);
1991
1992 __ LoadCodePointerField(
1993 r5, FieldMemOperand(r5, InterpreterData::kInterpreterTrampolineOffset),
1994 r6);
1995 __ LoadCodeInstructionStart(r5, r5);
1996 __ b(&trampoline_loaded);
1997
1998 __ bind(&builtin_trampoline);
1999 __ Move(r5, ExternalReference::
2000 address_of_interpreter_entry_trampoline_instruction_start(
2001 masm->isolate()));
2002 __ LoadU64(r5, MemOperand(r5));
2003
2004 __ bind(&trampoline_loaded);
2005 __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset.value()));
2006 __ mtlr(r0);
2007
2008 // Initialize the dispatch table register.
2009  __ Move(
2010      kInterpreterDispatchTableRegister,
2011      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
2012
2013  // Get the bytecode array pointer from the frame.
2014  __ LoadU64(kInterpreterBytecodeArrayRegister,
2015             MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
2016
2017 if (v8_flags.debug_code) {
2018    // Check function data field is actually a BytecodeArray object.
2019    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
2020    __ Assert(ne,
2021 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
2022 cr0);
2023 __ IsObjectType(kInterpreterBytecodeArrayRegister, r4, r0,
2024 BYTECODE_ARRAY_TYPE);
2025 __ Assert(
2026 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
2027 }
2028
2029  // Get the target bytecode offset from the frame.
2030  __ LoadU64(kInterpreterBytecodeOffsetRegister,
2031             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
2032  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
2033
2034 if (v8_flags.debug_code) {
2035    Label okay;
2036    __ CmpS64(kInterpreterBytecodeOffsetRegister,
2037              Operand(BytecodeArray::kHeaderSize - kHeapObjectTag),
2038              r0);
2039    __ bge(&okay);
2040 __ bkpt(0);
2041 __ bind(&okay);
2042 }
2043
2044 // Dispatch to the target bytecode.
2045 UseScratchRegisterScope temps(masm);
2046  Register scratch = temps.Acquire();
2047  __ lbzx(scratch, MemOperand(kInterpreterBytecodeArrayRegister,
2048                              kInterpreterBytecodeOffsetRegister));
2049  __ ShiftLeftU64(scratch, scratch, Operand(kSystemPointerSizeLog2));
2050  __ LoadU64(kJavaScriptCallCodeStartRegister,
2051             MemOperand(kInterpreterDispatchTableRegister, scratch));
2052  __ Jump(kJavaScriptCallCodeStartRegister);
2053}
2054
2055void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
2056  // Get bytecode array and bytecode offset from the stack frame.
2057  __ LoadU64(kInterpreterBytecodeArrayRegister,
2058             MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
2059  __ LoadU64(kInterpreterBytecodeOffsetRegister,
2060             MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
2061  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
2062
2063  Label enter_bytecode, function_entry_bytecode;
2064  __ CmpS64(kInterpreterBytecodeOffsetRegister,
2065            Operand(BytecodeArray::kHeaderSize - kHeapObjectTag +
2066                    kFunctionEntryBytecodeOffset));
2067  __ beq(&function_entry_bytecode);
2068
2069  // Load the current bytecode.
2070  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
2071                         kInterpreterBytecodeOffsetRegister));
2072
2073 // Advance to the next bytecode.
2074  Label if_return;
2075  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
2076                                kInterpreterBytecodeOffsetRegister, r4, r5, r6,
2077                                &if_return);
2078
2079 __ bind(&enter_bytecode);
2080  // Convert new bytecode offset to a Smi and save in the stackframe.
2081  __ SmiTag(r5, kInterpreterBytecodeOffsetRegister);
2082  __ StoreU64(r5,
2083              MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
2084
2085  Generate_InterpreterEnterBytecode(masm);
2086
2087 __ bind(&function_entry_bytecode);
2088 // If the code deoptimizes during the implicit function entry stack interrupt
2089 // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
2090 // not a valid bytecode offset. Detect this case and advance to the first
2091  // actual bytecode.
2092  __ mov(kInterpreterBytecodeOffsetRegister,
2093         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
2094  __ b(&enter_bytecode);
2095
2096 // We should never take the if_return path.
2097 __ bind(&if_return);
2098 __ Abort(AbortReason::kInvalidBytecodeAdvance);
2099}
2100
2101void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
2102  Generate_InterpreterEnterBytecode(masm);
2103}
2104
2105namespace {
2106void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
2107 bool javascript_builtin,
2108 bool with_result) {
2109 const RegisterConfiguration* config(RegisterConfiguration::Default());
2110 int allocatable_register_count = config->num_allocatable_general_registers();
2111 Register scratch = ip;
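  // In outline, the code below undoes the frame the deoptimizer built for a
  // builtin continuation: the LAZY-deopt return value in r3 replaces the hole
  // the deoptimizer left (for JavaScript builtins it is written into the last
  // argument slot once the registers are restored), all allocatable general
  // registers are popped, fp and the return address are restored from the
  // frame, and execution jumps to the builtin whose index was saved on the
  // stack.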
2112 if (with_result) {
2113 if (javascript_builtin) {
2114 __ mr(scratch, r3);
2115 } else {
2116 // Overwrite the hole inserted by the deoptimizer with the return value
2117 // from the LAZY deopt point.
2118 __ StoreU64(
2119 r3, MemOperand(
2120                  sp, config->num_allocatable_general_registers() *
2121                              kSystemPointerSize +
2122                          BuiltinContinuationFrameConstants::kFixedFrameSize));
2123  }
2124 }
2125 for (int i = allocatable_register_count - 1; i >= 0; --i) {
2126 int code = config->GetAllocatableGeneralCode(i);
2127 __ Pop(Register::from_code(code));
2128    if (javascript_builtin && code == kJavaScriptCallArgCountRegister.code()) {
2129      __ SmiUntag(Register::from_code(code));
2130    }
2131 }
2132  if (javascript_builtin && with_result) {
2133    // Overwrite the hole inserted by the deoptimizer with the return value from
2134    // the LAZY deopt point. r3 contains the arguments count, the return value
2135    // from LAZY is always the last argument.
2136    constexpr int return_value_offset =
2137        BuiltinContinuationFrameConstants::kFixedSlotCount -
2138        kJSArgcReceiverSlots;
2139    __ addi(r3, r3, Operand(return_value_offset));
2140 __ ShiftLeftU64(r0, r3, Operand(kSystemPointerSizeLog2));
2141 __ StoreU64(scratch, MemOperand(sp, r0));
2142 // Recover arguments count.
2143 __ subi(r3, r3, Operand(return_value_offset));
2144 }
2145 __ LoadU64(
2146      fp,
2147      MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
2148  // Load builtin index (stored as a Smi) and use it to get the builtin start
2149 // address from the builtins table.
2150 UseScratchRegisterScope temps(masm);
2151 Register builtin = temps.Acquire();
2152 __ Pop(builtin);
2153  __ addi(sp, sp,
2154          Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
2155  __ Pop(r0);
2156 __ mtlr(r0);
2157 __ LoadEntryFromBuiltinIndex(builtin, builtin);
2158 __ Jump(builtin);
2159}
2160} // namespace
2161
2162void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
2163 Generate_ContinueToBuiltinHelper(masm, false, false);
2164}
2165
2166void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
2167 MacroAssembler* masm) {
2168 Generate_ContinueToBuiltinHelper(masm, false, true);
2169}
2170
2171void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
2172 Generate_ContinueToBuiltinHelper(masm, true, false);
2173}
2174
2175void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
2176 MacroAssembler* masm) {
2177 Generate_ContinueToBuiltinHelper(masm, true, true);
2178}
2179
2180void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
2181 {
2182 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2183 __ CallRuntime(Runtime::kNotifyDeoptimized);
2184 }
2185
2186  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
2187  __ LoadU64(r3, MemOperand(sp, 0 * kSystemPointerSize));
2188 __ addi(sp, sp, Operand(1 * kSystemPointerSize));
2189 __ Ret();
2190}
2191
2192void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
2193 using D = OnStackReplacementDescriptor;
2194 static_assert(D::kParameterCount == 2);
2195 OnStackReplacement(masm, OsrSourceTier::kInterpreter,
2196 D::MaybeTargetCodeRegister(),
2197 D::ExpectedParameterCountRegister());
2198}
2199
2200void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
2201 using D = OnStackReplacementDescriptor;
2202 static_assert(D::kParameterCount == 2);
2203
2204  __ LoadU64(kContextRegister,
2205             MemOperand(fp, BaselineFrameConstants::kContextOffset));
2206  OnStackReplacement(masm, OsrSourceTier::kBaseline,
2207 D::MaybeTargetCodeRegister(),
2208 D::ExpectedParameterCountRegister());
2209}
2210
2211// static
2212void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
2213 // ----------- S t a t e -------------
2214 // -- r3 : argc
2215 // -- sp[0] : receiver
2216 // -- sp[4] : thisArg
2217 // -- sp[8] : argArray
2218 // -----------------------------------
2219
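  // This implements e.g. `f.apply(thisArg, argArray)`: the receiver f ends up
  // in r4, thisArg replaces the arguments on the stack as the new receiver,
  // and argArray is forwarded in r5 to CallWithArrayLike below (or, when it is
  // null/undefined, the receiver is called with no arguments).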
2220 // 1. Load receiver into r4, argArray into r5 (if present), remove all
2221 // arguments from the stack (including the receiver), and push thisArg (if
2222 // present) instead.
2223 {
2224 __ LoadRoot(r8, RootIndex::kUndefinedValue);
2225 __ mr(r5, r8);
2226
2227 Label done;
2228 __ LoadU64(r4, MemOperand(sp)); // receiver
2229 __ CmpS64(r3, Operand(JSParameterCount(1)), r0);
2230 __ blt(&done);
2231 __ LoadU64(r8, MemOperand(sp, kSystemPointerSize)); // thisArg
2232 __ CmpS64(r3, Operand(JSParameterCount(2)), r0);
2233 __ blt(&done);
2234 __ LoadU64(r5, MemOperand(sp, 2 * kSystemPointerSize)); // argArray
2235
2236 __ bind(&done);
2237 __ DropArgumentsAndPushNewReceiver(r3, r8);
2238 }
2239
2240 // ----------- S t a t e -------------
2241 // -- r5 : argArray
2242 // -- r4 : receiver
2243 // -- sp[0] : thisArg
2244 // -----------------------------------
2245
2246 // 2. We don't need to check explicitly for callable receiver here,
2247 // since that's the first thing the Call/CallWithArrayLike builtins
2248 // will do.
2249
2250 // 3. Tail call with no arguments if argArray is null or undefined.
2251 Label no_arguments;
2252 __ JumpIfRoot(r5, RootIndex::kNullValue, &no_arguments);
2253 __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &no_arguments);
2254
2255 // 4a. Apply the receiver to the given argArray.
2256 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2257
2258 // 4b. The argArray is either null or undefined, so we tail call without any
2259 // arguments to the receiver.
2260 __ bind(&no_arguments);
2261 {
2262 __ mov(r3, Operand(JSParameterCount(0)));
2263 __ TailCallBuiltin(Builtins::Call());
2264 }
2265}
2266
2267// static
2268void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
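  // This implements e.g. `f.call(thisArg, ...args)`: f is popped from the
  // receiver slot into r4, undefined is pushed if there are no arguments so a
  // receiver always exists, the argument count is decremented, and Call is
  // tail-called; the original first argument (thisArg) thereby becomes the
  // receiver.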
2269 // 1. Get the callable to call (passed as receiver) from the stack.
2270 __ Pop(r4);
2271
2272 // 2. Make sure we have at least one argument.
2273 // r3: actual number of arguments
2274 {
2275 Label done;
2276 __ CmpS64(r3, Operand(JSParameterCount(0)), r0);
2277 __ bne(&done);
2278 __ PushRoot(RootIndex::kUndefinedValue);
2279 __ addi(r3, r3, Operand(1));
2280 __ bind(&done);
2281 }
2282
2283 // 3. Adjust the actual number of arguments.
2284 __ subi(r3, r3, Operand(1));
2285
2286 // 4. Call the callable.
2287 __ TailCallBuiltin(Builtins::Call());
2288}
2289
2290void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2291 // ----------- S t a t e -------------
2292 // -- r3 : argc
2293 // -- sp[0] : receiver
2294 // -- sp[4] : target (if argc >= 1)
2295 // -- sp[8] : thisArgument (if argc >= 2)
2296 // -- sp[12] : argumentsList (if argc == 3)
2297 // -----------------------------------
2298
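  // Implements `Reflect.apply(target, thisArgument, argumentsList)`: target is
  // loaded into r4, thisArgument replaces the stack arguments as the receiver,
  // and argumentsList is forwarded in r5 to CallWithArrayLike.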
2299 // 1. Load target into r4 (if present), argumentsList into r5 (if present),
2300 // remove all arguments from the stack (including the receiver), and push
2301 // thisArgument (if present) instead.
2302 {
2303 __ LoadRoot(r4, RootIndex::kUndefinedValue);
2304 __ mr(r8, r4);
2305 __ mr(r5, r4);
2306
2307 Label done;
2308 __ CmpS64(r3, Operand(JSParameterCount(1)), r0);
2309 __ blt(&done);
2310    __ LoadU64(r4, MemOperand(sp, kSystemPointerSize));  // target
2311    __ CmpS64(r3, Operand(JSParameterCount(2)), r0);
2312    __ blt(&done);
2313    __ LoadU64(r8, MemOperand(sp, 2 * kSystemPointerSize));  // thisArgument
2314    __ CmpS64(r3, Operand(JSParameterCount(3)), r0);
2315    __ blt(&done);
2316    __ LoadU64(r5, MemOperand(sp, 3 * kSystemPointerSize));  // argumentsList
2317
2318 __ bind(&done);
2319 __ DropArgumentsAndPushNewReceiver(r3, r8);
2320 }
2321
2322 // ----------- S t a t e -------------
2323 // -- r5 : argumentsList
2324 // -- r4 : target
2325 // -- sp[0] : thisArgument
2326 // -----------------------------------
2327
2328 // 2. We don't need to check explicitly for callable target here,
2329 // since that's the first thing the Call/CallWithArrayLike builtins
2330 // will do.
2331
2332 // 3. Apply the target to the given argumentsList.
2333 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2334}
2335
2336void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2337 // ----------- S t a t e -------------
2338 // -- r3 : argc
2339 // -- sp[0] : receiver
2340 // -- sp[4] : target
2341 // -- sp[8] : argumentsList
2342 // -- sp[12] : new.target (optional)
2343 // -----------------------------------
2344
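  // Implements `Reflect.construct(target, argumentsList[, newTarget])`: target
  // goes to r4, argumentsList to r5, and newTarget to r6 (defaulting to target
  // when absent) before tail-calling ConstructWithArrayLike.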
2345 // 1. Load target into r4 (if present), argumentsList into r5 (if present),
2346 // new.target into r6 (if present, otherwise use target), remove all
2347  // arguments from the stack (including the receiver), and push undefined as
2348  // the new receiver instead.
2349 {
2350 __ LoadRoot(r4, RootIndex::kUndefinedValue);
2351 __ mr(r5, r4);
2352
2353 Label done;
2354 __ mr(r7, r4);
2355 __ CmpS64(r3, Operand(JSParameterCount(1)), r0);
2356 __ blt(&done);
2357    __ LoadU64(r4, MemOperand(sp, kSystemPointerSize));  // target
2358    __ mr(r6, r4);
2359    __ CmpS64(r3, Operand(JSParameterCount(2)), r0);
2360    __ blt(&done);
2361    __ LoadU64(r5, MemOperand(sp, 2 * kSystemPointerSize));  // argumentsList
2362    __ CmpS64(r3, Operand(JSParameterCount(3)), r0);
2363    __ blt(&done);
2364    __ LoadU64(r6, MemOperand(sp, 3 * kSystemPointerSize));  // new.target
2365 __ bind(&done);
2366 __ DropArgumentsAndPushNewReceiver(r3, r7);
2367 }
2368
2369 // ----------- S t a t e -------------
2370 // -- r5 : argumentsList
2371 // -- r6 : new.target
2372 // -- r4 : target
2373 // -- sp[0] : receiver (undefined)
2374 // -----------------------------------
2375
2376 // 2. We don't need to check explicitly for constructor target here,
2377 // since that's the first thing the Construct/ConstructWithArrayLike
2378 // builtins will do.
2379
2380 // 3. We don't need to check explicitly for constructor new.target here,
2381 // since that's the second thing the Construct/ConstructWithArrayLike
2382 // builtins will do.
2383
2384 // 4. Construct the target with the given new.target and argumentsList.
2385 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
2386}
2387
2388namespace {
2389
2390// Allocate new stack space for |count| arguments and shift all existing
2391// arguments already on the stack. |pointer_to_new_space_out| points to the
2392// first free slot on the stack to copy additional arguments to and
2393// |argc_in_out| is updated to include |count|.
2394void Generate_AllocateSpaceAndShiftExistingArguments(
2395 MacroAssembler* masm, Register count, Register argc_in_out,
2396 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2397 DCHECK(!AreAliased(count, argc_in_out, pointer_to_new_space_out, scratch1,
2398 scratch2));
2399 Register old_sp = scratch1;
2400 Register new_space = scratch2;
2401 __ addi(old_sp, sp, Operand(-kSystemPointerSize));
2402 __ ShiftLeftU64(new_space, count, Operand(kSystemPointerSizeLog2));
2403 __ AllocateStackSpace(new_space);
2404
2405 Register dest = pointer_to_new_space_out;
2406 __ addi(dest, sp, Operand(-kSystemPointerSize));
2407 Label loop, skip;
2408 __ mr(r0, argc_in_out);
2409 __ cmpi(r0, Operand::Zero());
2410 __ ble(&skip);
2411 __ mtctr(r0);
2412 __ bind(&loop);
2413 __ LoadU64WithUpdate(r0, MemOperand(old_sp, kSystemPointerSize));
2414 __ StoreU64WithUpdate(r0, MemOperand(dest, kSystemPointerSize));
2415 __ bdnz(&loop);
2416
2417 __ bind(&skip);
2418 // Update total number of arguments, restore dest.
2419 __ add(argc_in_out, argc_in_out, count);
2420 __ addi(dest, dest, Operand(kSystemPointerSize));
2421}
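// For example, when the helper above is called with |count| == 2 and three
// arguments already on the stack (sp -> a0, a1, a2), the loop copies a0..a2
// down to the newly allocated space, leaving two free slots directly above
// them; |pointer_to_new_space_out| then points at the first free slot and
// |argc_in_out| becomes 5.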
2422
2423} // namespace
2424
2425// static
2426// TODO(v8:11615): Observe Code::kMaxArguments in CallOrConstructVarargs
2427void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
2428 Builtin target_builtin) {
2429 // ----------- S t a t e -------------
2430 // -- r4 : target
2431 // -- r3 : number of parameters on the stack
2432 // -- r5 : arguments list (a FixedArray)
2433 // -- r7 : len (number of elements to push from args)
2434 // -- r6 : new.target (for [[Construct]])
2435 // -----------------------------------
2436
2437 Register scratch = ip;
2438
2439 if (v8_flags.debug_code) {
2440 // Allow r5 to be a FixedArray, or a FixedDoubleArray if r7 == 0.
2441 Label ok, fail;
2442 __ AssertNotSmi(r5);
2443 __ LoadTaggedField(scratch, FieldMemOperand(r5, HeapObject::kMapOffset),
2444 r0);
2445 __ LoadU16(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2446 __ cmpi(scratch, Operand(FIXED_ARRAY_TYPE));
2447 __ beq(&ok);
2448 __ cmpi(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
2449 __ bne(&fail);
2450 __ cmpi(r7, Operand::Zero());
2451 __ beq(&ok);
2452 // Fall through.
2453 __ bind(&fail);
2454 __ Abort(AbortReason::kOperandIsNotAFixedArray);
2455
2456 __ bind(&ok);
2457 }
2458
2459 // Check for stack overflow.
2460 Label stack_overflow;
2461 __ StackOverflowCheck(r7, scratch, &stack_overflow);
2462
2463 // Move the arguments already in the stack,
2464 // including the receiver and the return address.
2465 // r7: Number of arguments to make room for.
2466 // r3: Number of arguments already on the stack.
2467 // r8: Points to first free slot on the stack after arguments were shifted.
2468 Generate_AllocateSpaceAndShiftExistingArguments(masm, r7, r3, r8, ip, r9);
2469
2470 // Push arguments onto the stack (thisArgument is already on the stack).
2471 {
2472 Label loop, no_args, skip;
2473 __ cmpi(r7, Operand::Zero());
2474 __ beq(&no_args);
2475 __ addi(r5, r5,
2476 Operand(OFFSET_OF_DATA_START(FixedArray) - kHeapObjectTag -
2477 kTaggedSize));
2478 __ mtctr(r7);
2479 __ bind(&loop);
2480 __ LoadTaggedField(scratch, MemOperand(r5, kTaggedSize), r0);
2481 __ addi(r5, r5, Operand(kTaggedSize));
2482 __ CompareRoot(scratch, RootIndex::kTheHoleValue);
2483 __ bne(&skip);
2484 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
2485 __ bind(&skip);
2486 __ StoreU64(scratch, MemOperand(r8));
2487 __ addi(r8, r8, Operand(kSystemPointerSize));
2488 __ bdnz(&loop);
2489 __ bind(&no_args);
2490 }
2491
2492 // Tail-call to the actual Call or Construct builtin.
2493 __ TailCallBuiltin(target_builtin);
2494
2495 __ bind(&stack_overflow);
2496 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2497}
2498
2499// static
2500void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2501 CallOrConstructMode mode,
2502 Builtin target_builtin) {
2503 // ----------- S t a t e -------------
2504 // -- r3 : the number of arguments
2505 // -- r6 : the new.target (for [[Construct]] calls)
2506 // -- r4 : the target to call (can be any Object)
2507 // -- r5 : start index (to support rest parameters)
2508 // -----------------------------------
2509
2510 Register scratch = r9;
2511
2512 if (mode == CallOrConstructMode::kConstruct) {
2513 Label new_target_constructor, new_target_not_constructor;
2514 __ JumpIfSmi(r6, &new_target_not_constructor);
2515 __ LoadTaggedField(scratch, FieldMemOperand(r6, HeapObject::kMapOffset),
2516 r0);
2517 __ lbz(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
2518 __ TestBit(scratch, Map::Bits1::IsConstructorBit::kShift, r0);
2519 __ bne(&new_target_constructor, cr0);
2520 __ bind(&new_target_not_constructor);
2521 {
2522 FrameScope scope(masm, StackFrame::MANUAL);
2523 __ EnterFrame(StackFrame::INTERNAL);
2524 __ Push(r6);
2525 __ CallRuntime(Runtime::kThrowNotConstructor);
2526 __ Trap(); // Unreachable.
2527 }
2528 __ bind(&new_target_constructor);
2529 }
2530
2531  Label stack_done, stack_overflow;
2532  __ LoadU64(r8, MemOperand(fp, StandardFrameConstants::kArgCOffset));
2533  __ subi(r8, r8, Operand(kJSArgcReceiverSlots));
2534 __ sub(r8, r8, r5, LeaveOE, SetRC);
2535 __ ble(&stack_done, cr0);
2536 {
2537 // ----------- S t a t e -------------
2538 // -- r3 : the number of arguments already in the stack
2539 // -- r4 : the target to call (can be any Object)
2540 // -- r5 : start index (to support rest parameters)
2541 // -- r6 : the new.target (for [[Construct]] calls)
2542 // -- fp : point to the caller stack frame
2543 // -- r8 : number of arguments to copy, i.e. arguments count - start index
2544 // -----------------------------------
2545
2546 // Check for stack overflow.
2547 __ StackOverflowCheck(r8, scratch, &stack_overflow);
2548
2549 // Forward the arguments from the caller frame.
2550 // Point to the first argument to copy (skipping the receiver).
2551    __ addi(r7, fp,
2552            Operand(CommonFrameConstants::kFixedFrameSizeAboveFp +
2553                    kSystemPointerSize));
2554    __ ShiftLeftU64(scratch, r5, Operand(kSystemPointerSizeLog2));
2555 __ add(r7, r7, scratch);
2556
2557 // Move the arguments already in the stack,
2558 // including the receiver and the return address.
2559 // r8: Number of arguments to make room for.
2560 // r3: Number of arguments already on the stack.
2561 // r5: Points to first free slot on the stack after arguments were shifted.
2562 Generate_AllocateSpaceAndShiftExistingArguments(masm, r8, r3, r5, scratch,
2563 ip);
2564
2565 // Copy arguments from the caller frame.
2566 // TODO(victorgomes): Consider using forward order as potentially more cache
2567 // friendly.
2568 {
2569 Label loop;
2570 __ bind(&loop);
2571 {
2572 __ subi(r8, r8, Operand(1));
2573 __ ShiftLeftU64(scratch, r8, Operand(kSystemPointerSizeLog2));
2574 __ LoadU64(r0, MemOperand(r7, scratch));
2575 __ StoreU64(r0, MemOperand(r5, scratch));
2576 __ cmpi(r8, Operand::Zero());
2577 __ bne(&loop);
2578 }
2579 }
2580 }
2581 __ bind(&stack_done);
2582 // Tail-call to the actual Call or Construct builtin.
2583 __ TailCallBuiltin(target_builtin);
2584
2585 __ bind(&stack_overflow);
2586 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2587}
2588
2589// static
2590void Builtins::Generate_CallFunction(MacroAssembler* masm,
2591 ConvertReceiverMode mode) {
2592 // ----------- S t a t e -------------
2593 // -- r3 : the number of arguments
2594 // -- r4 : the function to call (checked to be a JSFunction)
2595 // -----------------------------------
2596 __ AssertCallableFunction(r4);
2597
2598 __ LoadTaggedField(
2599 r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset), r0);
2600
2601 // Enter the context of the function; ToObject has to run in the function
2602 // context, and we also need to take the global proxy from the function
2603 // context in case of conversion.
2604 __ LoadTaggedField(cp, FieldMemOperand(r4, JSFunction::kContextOffset), r0);
2605 // We need to convert the receiver for non-native sloppy mode functions.
2606 Label done_convert;
2607 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kFlagsOffset));
2608 __ andi(r0, r6,
2609 Operand(SharedFunctionInfo::IsStrictBit::kMask |
2610 SharedFunctionInfo::IsNativeBit::kMask));
2611 __ bne(&done_convert, cr0);
2612 {
2613 // ----------- S t a t e -------------
2614 // -- r3 : the number of arguments
2615 // -- r4 : the function to call (checked to be a JSFunction)
2616 // -- r5 : the shared function info.
2617 // -- cp : the function context.
2618 // -----------------------------------
2619
2620    if (mode == ConvertReceiverMode::kNullOrUndefined) {
2621      // Patch receiver to global proxy.
2622 __ LoadGlobalProxy(r6);
2623 } else {
2624 Label convert_to_object, convert_receiver;
2625 __ LoadReceiver(r6);
2626 __ JumpIfSmi(r6, &convert_to_object);
2627 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2628 __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
2629      __ bge(&done_convert);
2630      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2631        Label convert_global_proxy;
2632 __ JumpIfRoot(r6, RootIndex::kUndefinedValue, &convert_global_proxy);
2633 __ JumpIfNotRoot(r6, RootIndex::kNullValue, &convert_to_object);
2634 __ bind(&convert_global_proxy);
2635 {
2636 // Patch receiver to global proxy.
2637 __ LoadGlobalProxy(r6);
2638 }
2639 __ b(&convert_receiver);
2640 }
2641 __ bind(&convert_to_object);
2642 {
2643 // Convert receiver using ToObject.
2644 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2645 // in the fast case? (fall back to AllocateInNewSpace?)
2646 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2647 __ SmiTag(r3);
2648 __ Push(r3, r4);
2649 __ mr(r3, r6);
2650 __ Push(cp);
2651 __ CallBuiltin(Builtin::kToObject);
2652 __ Pop(cp);
2653 __ mr(r6, r3);
2654 __ Pop(r3, r4);
2655 __ SmiUntag(r3);
2656 }
2657 __ LoadTaggedField(
2658 r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset), r0);
2659 __ bind(&convert_receiver);
2660 }
2661 __ StoreReceiver(r6);
2662 }
2663 __ bind(&done_convert);
2664
2665 // ----------- S t a t e -------------
2666 // -- r3 : the number of arguments
2667 // -- r4 : the function to call (checked to be a JSFunction)
2668 // -- r5 : the shared function info.
2669 // -- cp : the function context.
2670 // -----------------------------------
2671
2672 __ LoadU16(
2673 r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
2674 __ InvokeFunctionCode(r4, no_reg, r5, r3, InvokeType::kJump);
2675}
2676
2677namespace {
2678
2679void Generate_PushBoundArguments(MacroAssembler* masm) {
2680 // ----------- S t a t e -------------
2681 // -- r3 : the number of arguments
2682 // -- r4 : target (checked to be a JSBoundFunction)
2683 // -- r6 : new.target (only in case of [[Construct]])
2684 // -----------------------------------
2685
2686 // Load [[BoundArguments]] into r5 and length of that into r7.
2687 Label no_bound_arguments;
2688 __ LoadTaggedField(
2689 r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset), r0);
2690 __ SmiUntag(r7, FieldMemOperand(r5, offsetof(FixedArray, length_)), SetRC,
2691 r0);
2692 __ beq(&no_bound_arguments, cr0);
2693 {
2694 // ----------- S t a t e -------------
2695 // -- r3 : the number of arguments
2696 // -- r4 : target (checked to be a JSBoundFunction)
2697 // -- r5 : the [[BoundArguments]] (implemented as FixedArray)
2698 // -- r6 : new.target (only in case of [[Construct]])
2699 // -- r7 : the number of [[BoundArguments]]
2700 // -----------------------------------
2701
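    // In effect the receiver is popped, the [[BoundArguments]] are pushed in
    // front of the existing call-site arguments, the receiver is pushed back
    // on top, and r3 (the argument count) is increased by the number of bound
    // arguments.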
2702 Register scratch = r9;
2703 // Reserve stack space for the [[BoundArguments]].
2704 {
2705 Label done;
2706 __ ShiftLeftU64(r10, r7, Operand(kSystemPointerSizeLog2));
2707 __ sub(r0, sp, r10);
2708 // Check the stack for overflow. We are not trying to catch interruptions
2709 // (i.e. debug break and preemption) here, so check the "real stack
2710 // limit".
2711 {
2712 __ LoadStackLimit(scratch, StackLimitKind::kRealStackLimit, ip);
2713 __ CmpU64(r0, scratch);
2714 }
2715 __ bgt(&done); // Signed comparison.
2716 {
2717 FrameScope scope(masm, StackFrame::MANUAL);
2718 __ EnterFrame(StackFrame::INTERNAL);
2719 __ CallRuntime(Runtime::kThrowStackOverflow);
2720 }
2721 __ bind(&done);
2722 }
2723
2724 // Pop receiver.
2725 __ Pop(r8);
2726
2727 // Push [[BoundArguments]].
2728 {
2729 Label loop, done;
2730 __ add(r3, r3, r7); // Adjust effective number of arguments.
2731 __ addi(r5, r5,
2732 Operand(OFFSET_OF_DATA_START(FixedArray) - kHeapObjectTag));
2733 __ mtctr(r7);
2734
2735 __ bind(&loop);
2736 __ subi(r7, r7, Operand(1));
2737 __ ShiftLeftU64(scratch, r7, Operand(kTaggedSizeLog2));
2738 __ add(scratch, scratch, r5);
2739 __ LoadTaggedField(scratch, MemOperand(scratch), r0);
2740 __ Push(scratch);
2741 __ bdnz(&loop);
2742 __ bind(&done);
2743 }
2744
2745 // Push receiver.
2746 __ Push(r8);
2747 }
2748 __ bind(&no_bound_arguments);
2749}
2750
2751} // namespace
2752
2753// static
2754void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2755 // ----------- S t a t e -------------
2756 // -- r3 : the number of arguments
2757 // -- r4 : the function to call (checked to be a JSBoundFunction)
2758 // -----------------------------------
2759 __ AssertBoundFunction(r4);
2760
2761 // Patch the receiver to [[BoundThis]].
2762 __ LoadTaggedField(r6, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset),
2763 r0);
2764 __ StoreReceiver(r6);
2765
2766 // Push the [[BoundArguments]] onto the stack.
2767 Generate_PushBoundArguments(masm);
2768
2769 // Call the [[BoundTargetFunction]] via the Call builtin.
2770 __ LoadTaggedField(
2771 r4, FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset), r0);
2772 __ TailCallBuiltin(Builtins::Call());
2773}
2774
2775// static
2776void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2777 // ----------- S t a t e -------------
2778 // -- r3 : the number of arguments
2779 // -- r4 : the target to call (can be any Object).
2780 // -----------------------------------
2781 Register target = r4;
2782 Register map = r7;
2783 Register instance_type = r8;
2784 Register scratch = r9;
2785 DCHECK(!AreAliased(r3, target, map, instance_type));
2786
2787 Label non_callable, class_constructor;
2788 __ JumpIfSmi(target, &non_callable);
2789 __ LoadMap(map, target);
2790  __ CompareInstanceTypeRange(map, instance_type, scratch,
2791                              FIRST_CALLABLE_JS_FUNCTION_TYPE,
2792                              LAST_CALLABLE_JS_FUNCTION_TYPE);
2793  __ TailCallBuiltin(Builtins::CallFunction(mode), le);
2794 __ cmpi(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
2795 __ TailCallBuiltin(Builtin::kCallBoundFunction, eq);
2796
2797 // Check if target has a [[Call]] internal method.
2798 {
2799 Register flags = r7;
2800 __ lbz(flags, FieldMemOperand(map, Map::kBitFieldOffset));
2801 map = no_reg;
2802 __ TestBit(flags, Map::Bits1::IsCallableBit::kShift, r0);
2803 __ beq(&non_callable, cr0);
2804 }
2805
2806 // Check if target is a proxy and call CallProxy external builtin
2807 __ cmpi(instance_type, Operand(JS_PROXY_TYPE));
2808 __ TailCallBuiltin(Builtin::kCallProxy, eq);
2809
2810 // Check if target is a wrapped function and call CallWrappedFunction external
2811 // builtin
2812 __ cmpi(instance_type, Operand(JS_WRAPPED_FUNCTION_TYPE));
2813 __ TailCallBuiltin(Builtin::kCallWrappedFunction, eq);
2814
2815 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2816 // Check that the function is not a "classConstructor".
2817 __ cmpi(instance_type, Operand(JS_CLASS_CONSTRUCTOR_TYPE));
2818 __ beq(&class_constructor);
2819
2820 // 2. Call to something else, which might have a [[Call]] internal method (if
2821 // not we raise an exception).
2822  // Overwrite the original receiver with the (original) target.
2823 __ StoreReceiver(target);
2824 // Let the "call_as_function_delegate" take care of the rest.
2825 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2826  __ TailCallBuiltin(
2827      Builtins::CallFunction(ConvertReceiverMode::kNotNullOrUndefined));
2828
2829 // 3. Call to something that is not callable.
2830 __ bind(&non_callable);
2831 {
2832 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2833 __ Push(target);
2834 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2835 __ Trap(); // Unreachable.
2836 }
2837
2838 // 4. The function is a "classConstructor", need to raise an exception.
2839 __ bind(&class_constructor);
2840 {
2841 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2842 __ Push(target);
2843 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2844 __ Trap(); // Unreachable.
2845 }
2846}
2847
2848// static
2849void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2850 // ----------- S t a t e -------------
2851 // -- r3 : the number of arguments
2852 // -- r4 : the constructor to call (checked to be a JSFunction)
2853 // -- r6 : the new target (checked to be a constructor)
2854 // -----------------------------------
2855 __ AssertConstructor(r4);
2856 __ AssertFunction(r4);
2857
2858 // Calling convention for function specific ConstructStubs require
2859 // r5 to contain either an AllocationSite or undefined.
2860 __ LoadRoot(r5, RootIndex::kUndefinedValue);
2861
2862 Label call_generic_stub;
2863
2864 // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2865 __ LoadTaggedField(
2866 r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset), r0);
2867 __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
2868 __ mov(ip, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2869 __ and_(r7, r7, ip, SetRC);
2870 __ beq(&call_generic_stub, cr0);
2871
2872 __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);
2873
2874 __ bind(&call_generic_stub);
2875 __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
2876}
2877
2878// static
2879void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2880 // ----------- S t a t e -------------
2881 // -- r3 : the number of arguments
2882 // -- r4 : the function to call (checked to be a JSBoundFunction)
2883 // -- r6 : the new target (checked to be a constructor)
2884 // -----------------------------------
2885 __ AssertConstructor(r4);
2886 __ AssertBoundFunction(r4);
2887
2888 // Push the [[BoundArguments]] onto the stack.
2889 Generate_PushBoundArguments(masm);
2890
2891 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2892 Label skip;
2893 __ CompareTagged(r4, r6);
2894 __ bne(&skip);
2895 __ LoadTaggedField(
2896 r6, FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset), r0);
2897 __ bind(&skip);
2898
2899 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2900 __ LoadTaggedField(
2901 r4, FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset), r0);
2902 __ TailCallBuiltin(Builtin::kConstruct);
2903}
2904
2905// static
2906void Builtins::Generate_Construct(MacroAssembler* masm) {
2907 // ----------- S t a t e -------------
2908 // -- r3 : the number of arguments
2909 // -- r4 : the constructor to call (can be any Object)
2910 // -- r6 : the new target (either the same as the constructor or
2911 // the JSFunction on which new was invoked initially)
2912 // -----------------------------------
2913 Register target = r4;
2914 Register map = r7;
2915 Register instance_type = r8;
2916 Register scratch = r9;
2917 DCHECK(!AreAliased(r3, target, map, instance_type, scratch));
2918
2919 // Check if target is a Smi.
2920 Label non_constructor, non_proxy;
2921 __ JumpIfSmi(target, &non_constructor);
2922
2923 // Check if target has a [[Construct]] internal method.
2924 __ LoadTaggedField(map, FieldMemOperand(target, HeapObject::kMapOffset), r0);
2925 {
2926 Register flags = r5;
2927 DCHECK(!AreAliased(r3, target, map, instance_type, flags));
2928 __ lbz(flags, FieldMemOperand(map, Map::kBitFieldOffset));
2929 __ TestBit(flags, Map::Bits1::IsConstructorBit::kShift, r0);
2930 __ beq(&non_constructor, cr0);
2931 }
2932
2933 // Dispatch based on instance type.
2934 __ CompareInstanceTypeRange(map, instance_type, scratch,
2935 FIRST_JS_FUNCTION_TYPE, LAST_JS_FUNCTION_TYPE);
2936 __ TailCallBuiltin(Builtin::kConstructFunction, le);
2937
2938 // Only dispatch to bound functions after checking whether they are
2939 // constructors.
2940 __ cmpi(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
2941 __ TailCallBuiltin(Builtin::kConstructBoundFunction, eq);
2942
2943 // Only dispatch to proxies after checking whether they are constructors.
2944 __ cmpi(instance_type, Operand(JS_PROXY_TYPE));
2945 __ bne(&non_proxy);
2946 __ TailCallBuiltin(Builtin::kConstructProxy);
2947
2948 // Called Construct on an exotic Object with a [[Construct]] internal method.
2949 __ bind(&non_proxy);
2950 {
2951 // Overwrite the original receiver with the (original) target.
2952 __ StoreReceiver(target);
2953 // Let the "call_as_constructor_delegate" take care of the rest.
2954 __ LoadNativeContextSlot(target,
2955 Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
2956 __ TailCallBuiltin(Builtins::CallFunction());
2957 }
2958
2959 // Called Construct on an Object that doesn't have a [[Construct]] internal
2960 // method.
2961 __ bind(&non_constructor);
2962 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
2963}
2964
2965#if V8_ENABLE_WEBASSEMBLY
2966
2967struct SaveWasmParamsScope {
2968 explicit SaveWasmParamsScope(MacroAssembler* masm) : masm(masm) {
2969 for (Register gp_param_reg : wasm::kGpParamRegisters) {
2970 gp_regs.set(gp_param_reg);
2971 }
2972 for (DoubleRegister fp_param_reg : wasm::kFpParamRegisters) {
2973 fp_regs.set(fp_param_reg);
2974 }
2975
2976 CHECK_EQ(gp_regs.Count(), arraysize(wasm::kGpParamRegisters));
2977 CHECK_EQ(fp_regs.Count(), arraysize(wasm::kFpParamRegisters));
2978 CHECK_EQ(simd_regs.Count(), arraysize(wasm::kFpParamRegisters));
2979 CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedGpParamRegs + 1,
2980 gp_regs.Count());
2981 CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs,
2982 fp_regs.Count());
2983 CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs,
2984 simd_regs.Count());
2985
2986 __ MultiPush(gp_regs);
2987 __ MultiPushF64AndV128(fp_regs, simd_regs, ip, r0);
2988 }
2989 ~SaveWasmParamsScope() {
2990 __ MultiPopF64AndV128(fp_regs, simd_regs, ip, r0);
2991 __ MultiPop(gp_regs);
2992 }
2993
2994 RegList gp_regs;
2995 DoubleRegList fp_regs;
2996 // List must match register numbers under kFpParamRegisters.
2997 Simd128RegList simd_regs = {v1, v2, v3, v4, v5, v6, v7, v8};
2998 MacroAssembler* masm;
2999};
3000
3001void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
3002 Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
3003 Register vector = r11;
3004 Register scratch = ip;
3005 Label allocate_vector, done;
3006
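  // Look up the feedback vector for the function whose index arrived in
  // func_index: load the instance's feedback-vector array, then its element at
  // index func_index (still a Smi if no vector has been allocated yet).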
3007 __ LoadTaggedField(
3008      vector,
3009      FieldMemOperand(kWasmImplicitArgRegister,
3010                      WasmTrustedInstanceData::kFeedbackVectorsOffset),
3011 scratch);
3012 __ ShiftLeftU64(scratch, func_index, Operand(kTaggedSizeLog2));
3013 __ AddS64(vector, vector, scratch);
3014 __ LoadTaggedField(vector,
3015 FieldMemOperand(vector, OFFSET_OF_DATA_START(FixedArray)),
3016 scratch);
3017 __ JumpIfSmi(vector, &allocate_vector);
3018  __ bind(&done);
3019  __ push(kWasmImplicitArgRegister);
3020  __ push(vector);
3021 __ Ret();
3022
3023 __ bind(&allocate_vector);
3024
3025 // Feedback vector doesn't exist yet. Call the runtime to allocate it.
3026 // We temporarily change the frame type for this, because we need special
3027 // handling by the stack walker in case of GC.
3028 __ mov(scratch,
3029 Operand(StackFrame::TypeToMarker(StackFrame::WASM_LIFTOFF_SETUP)));
3030 __ StoreU64(scratch, MemOperand(sp));
3031
3032 // Save current return address as it will get clobbered during CallRuntime.
3033 __ mflr(scratch);
3034 __ push(scratch);
3035 {
3036 SaveWasmParamsScope save_params(masm); // Will use r0 and ip as scratch.
3037    // Arguments to the runtime function: instance data, func_index.
3038    __ push(kWasmImplicitArgRegister);
3039    __ SmiTag(func_index);
3040 __ push(func_index);
3041 // Allocate a stack slot where the runtime function can spill a pointer
3042 // to the {NativeModule}.
3043 __ push(r11);
3044 __ LoadSmiLiteral(cp, Smi::zero());
3045 __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
3046 __ mr(vector, kReturnRegister0);
3047 // Saved parameters are restored at the end of this block.
3048 }
3049 __ pop(scratch);
3050 __ mtlr(scratch);
3051
3052 __ mov(scratch, Operand(StackFrame::TypeToMarker(StackFrame::WASM)));
3053 __ StoreU64(scratch, MemOperand(sp));
3054 __ b(&done);
3055}
3056
3057void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
3058 // The function index was put in a register by the jump table trampoline.
3059  // Convert to Smi for the runtime call.
3060  __ SmiTag(kWasmCompileLazyFuncIndexRegister);
3061
3062 {
3063 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
3064 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
3065
3066 {
3067 SaveWasmParamsScope save_params(masm); // Will use r0 and ip as scratch.
3068
3069      // Push the instance data as an explicit argument to the runtime function.
3070      __ push(kWasmImplicitArgRegister);
3071      // Push the function index as second argument.
3072      __ push(kWasmCompileLazyFuncIndexRegister);
3073      // Initialize the JavaScript context with 0. CEntry will use it to
3074 // set the current context on the isolate.
3075 __ LoadSmiLiteral(cp, Smi::zero());
3076 __ CallRuntime(Runtime::kWasmCompileLazy, 2);
3077 // The runtime function returns the jump table slot offset as a Smi. Use
3078      // that to compute the jump target in r11.
3079      __ SmiUntag(kReturnRegister0);
3080      __ mr(r11, kReturnRegister0);
3081
3082 // Saved parameters are restored at the end of this block.
3083 }
3084
3085 // After the instance data register has been restored, we can add the jump
3086 // table start to the jump table offset already stored in r11.
3087    __ LoadU64(ip,
3088               FieldMemOperand(kWasmImplicitArgRegister,
3089                               WasmTrustedInstanceData::kJumpTableStartOffset),
3090 r0);
3091 __ AddS64(r11, r11, ip);
3092 }
3093
3094 // Finally, jump to the jump table slot for the function.
3095 __ Jump(r11);
3096}
3097
3098void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
3099 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
3100 {
3101 FrameAndConstantPoolScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
3102
3103 // Save all parameter registers. They might hold live values, we restore
3104    // them after the runtime call.
3105    __ MultiPush(WasmDebugBreakFrameConstants::kPushedGpRegs);
3106    __ MultiPushF64AndV128(WasmDebugBreakFrameConstants::kPushedFpRegs,
3107                           WasmDebugBreakFrameConstants::kPushedSimd128Regs, ip,
3108                           r0);
3109
3110 // Initialize the JavaScript context with 0. CEntry will use it to
3111 // set the current context on the isolate.
3112 __ LoadSmiLiteral(cp, Smi::zero());
3113 __ CallRuntime(Runtime::kWasmDebugBreak, 0);
3114
3115    // Restore registers.
3116    __ MultiPopF64AndV128(WasmDebugBreakFrameConstants::kPushedFpRegs,
3117                          WasmDebugBreakFrameConstants::kPushedSimd128Regs, ip,
3118                          r0);
3119    __ MultiPop(WasmDebugBreakFrameConstants::kPushedGpRegs);
3120  }
3121 __ Ret();
3122}
3123
3124namespace {
3125// Check that the stack was in the old state (if generated code assertions are
3126// enabled), and switch to the new state.
3127void SwitchStackState(MacroAssembler* masm, Register stack, Register tmp,
3128                      wasm::JumpBuffer::StackState old_state,
3129                      wasm::JumpBuffer::StackState new_state) {
3130 __ LoadU32(tmp, MemOperand(stack, wasm::kStackStateOffset));
3131 Label ok;
3132 __ JumpIfEqual(tmp, old_state, &ok);
3133 __ Trap();
3134 __ bind(&ok);
3135 __ mov(tmp, Operand(new_state));
3136 __ StoreU32(tmp, MemOperand(stack, wasm::kStackStateOffset), r0);
3137}
3138
3139// Switch the stack pointer.
3140void SwitchStackPointer(MacroAssembler* masm, Register stack) {
3141 __ LoadU64(sp, MemOperand(stack, wasm::kStackSpOffset));
3142}
3143
3144void FillJumpBuffer(MacroAssembler* masm, Register stack, Label* target,
3145 Register tmp) {
3146 __ mr(tmp, sp);
3147 __ StoreU64(tmp, MemOperand(stack, wasm::kStackSpOffset));
3148 __ StoreU64(fp, MemOperand(stack, wasm::kStackFpOffset));
3149 __ LoadStackLimit(tmp, StackLimitKind::kRealStackLimit, r0);
3150 __ StoreU64(tmp, MemOperand(stack, wasm::kStackLimitOffset));
3151
3152 __ GetLabelAddress(tmp, target);
3153 // Stash the address in the jump buffer.
3154 __ StoreU64(tmp, MemOperand(stack, wasm::kStackPcOffset));
3155}
3156
3157void LoadJumpBuffer(MacroAssembler* masm, Register stack, bool load_pc,
3158 Register tmp, wasm::JumpBuffer::StackState expected_state) {
3159 SwitchStackPointer(masm, stack);
3160 __ LoadU64(fp, MemOperand(stack, wasm::kStackFpOffset));
3161 SwitchStackState(masm, stack, tmp, expected_state, wasm::JumpBuffer::Active);
3162 if (load_pc) {
3163 __ LoadU64(tmp, MemOperand(stack, wasm::kStackPcOffset));
3164 __ Jump(tmp);
3165 }
3166 // The stack limit in StackGuard is set separately under the ExecutionAccess
3167 // lock.
3168}
3169
3170void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
3171 Register tmp,
3172 wasm::JumpBuffer::StackState expected_state) {
3173 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3174 // Switch stack!
3175 LoadJumpBuffer(masm, target_stack, false, tmp, expected_state);
3176}
3177
3178// Updates the stack limit and central stack info, and validates the switch.
3179void SwitchStacks(MacroAssembler* masm, Register old_stack, bool return_switch,
3180 const std::initializer_list<Register> keep) {
3181 using ER = ExternalReference;
3182
3183 for (auto reg : keep) {
3184 __ Push(reg);
3185 }
3186
3187 {
3188 __ PrepareCallCFunction(2, r0);
3189 FrameScope scope(masm, StackFrame::MANUAL);
3190 // Move {old_stack} first in case it aliases kCArgRegs[0].
3191 __ Move(kCArgRegs[1], old_stack);
3192 __ Move(kCArgRegs[0], ExternalReference::isolate_address(masm->isolate()));
3193 __ CallCFunction(
3194 return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
3195 }
3196
3197 for (auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
3198 __ Pop(*it);
3199 }
3200}
3201
3202void ReloadParentStack(MacroAssembler* masm, Register return_reg,
3203 Register return_value, Register context, Register tmp1,
3204 Register tmp2, Register tmp3) {
3205 Register active_stack = tmp1;
3206 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3207
3208 // Set a null pointer in the jump buffer's SP slot to indicate to the stack
3209 // frame iterator that this stack is empty.
3210 __ Zero(MemOperand(active_stack, wasm::kStackSpOffset));
3211 {
3212 UseScratchRegisterScope temps(masm);
3213 Register scratch = temps.Acquire();
3214    SwitchStackState(masm, active_stack, scratch, wasm::JumpBuffer::Active,
3215                     wasm::JumpBuffer::Retired);
3216  }
3217 Register parent = tmp2;
3218 __ LoadU64(parent, MemOperand(active_stack, wasm::kStackParentOffset), r0);
3219
3220 // Update active stack.
3221 __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
3222
3223 // Switch stack!
3224 SwitchStacks(masm, active_stack, true,
3225 {return_reg, return_value, context, parent});
3226 LoadJumpBuffer(masm, parent, false, tmp3, wasm::JumpBuffer::Inactive);
3227}
3228
3229void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
3230 Register suspender = tmp1;
3231 __ LoadRoot(suspender, RootIndex::kActiveSuspender);
3232 __ LoadTaggedField(
3233 suspender, FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset),
3234 r0);
3235
3236  int32_t active_suspender_offset =
3237      MacroAssembler::RootRegisterOffsetForRootIndex(
3238          RootIndex::kActiveSuspender);
3239 __ StoreU64(suspender, MemOperand(kRootRegister, active_suspender_offset));
3240}
3241
3242void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
3243 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset),
3244 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3245}
3246
3247class RegisterAllocator {
3248 public:
3249 class Scoped {
3250 public:
3251 Scoped(RegisterAllocator* allocator, Register* reg)
3252 : allocator_(allocator), reg_(reg) {}
3253 ~Scoped() { allocator_->Free(reg_); }
3254
3255 private:
3256 RegisterAllocator* allocator_;
3257 Register* reg_;
3258 };
3259
3260  explicit RegisterAllocator(const RegList& registers)
3261      : initial_(registers), available_(registers) {}
3262  void Ask(Register* reg) {
3263 DCHECK_EQ(*reg, no_reg);
3264 DCHECK(!available_.is_empty());
3265 *reg = available_.PopFirst();
3266 allocated_registers_.push_back(reg);
3267 }
3268
3269 bool registerIsAvailable(const Register& reg) { return available_.has(reg); }
3270
3271 void Pinned(const Register& requested, Register* reg) {
3272 DCHECK(registerIsAvailable(requested));
3273 *reg = requested;
3274 Reserve(requested);
3275 allocated_registers_.push_back(reg);
3276 }
3277
3278 void Free(Register* reg) {
3279 DCHECK_NE(*reg, no_reg);
3280 available_.set(*reg);
3281    *reg = no_reg;
3282    allocated_registers_.erase(
3283        find(allocated_registers_.begin(), allocated_registers_.end(), reg));
3284 }
3285
3286 void Reserve(const Register& reg) {
3287 if (reg == no_reg) {
3288 return;
3289 }
3290 DCHECK(registerIsAvailable(reg));
3291 available_.clear(reg);
3292 }
3293
3294 void Reserve(const Register& reg1, const Register& reg2,
3295 const Register& reg3 = no_reg, const Register& reg4 = no_reg,
3296 const Register& reg5 = no_reg, const Register& reg6 = no_reg) {
3297 Reserve(reg1);
3298 Reserve(reg2);
3299 Reserve(reg3);
3300 Reserve(reg4);
3301 Reserve(reg5);
3302 Reserve(reg6);
3303 }
3304
3305 bool IsUsed(const Register& reg) {
3306 return initial_.has(reg) && !registerIsAvailable(reg);
3307 }
3308
3309 void ResetExcept(const Register& reg1 = no_reg, const Register& reg2 = no_reg,
3310 const Register& reg3 = no_reg, const Register& reg4 = no_reg,
3311 const Register& reg5 = no_reg,
3312                   const Register& reg6 = no_reg) {
3313    available_ = initial_;
3314    available_.clear(reg1);
3315 available_.clear(reg2);
3316 available_.clear(reg3);
3317 available_.clear(reg4);
3318 available_.clear(reg5);
3319 available_.clear(reg6);
3320
3321 auto it = allocated_registers_.begin();
3322 while (it != allocated_registers_.end()) {
3323 if (registerIsAvailable(**it)) {
3324 **it = no_reg;
3325 it = allocated_registers_.erase(it);
3326 } else {
3327 it++;
3328 }
3329 }
3330 }
3331
3332 static RegisterAllocator WithAllocatableGeneralRegisters() {
3333 RegList list;
3334 const RegisterConfiguration* config(RegisterConfiguration::Default());
3335
3336 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
3337 int code = config->GetAllocatableGeneralCode(i);
3338 Register candidate = Register::from_code(code);
3339 list.set(candidate);
3340 }
3341 return RegisterAllocator(list);
3342 }
3343
3344 private:
3345 std::vector<Register*> allocated_registers_;
3346  const RegList initial_;
3347  RegList available_;
3348};
3349
3350#define DEFINE_REG(Name) \
3351 Register Name = no_reg; \
3352 regs.Ask(&Name);
3353
3354#define DEFINE_REG_W(Name) \
3355 DEFINE_REG(Name); \
3356 Name = Name.W();
3357
3358#define ASSIGN_REG(Name) regs.Ask(&Name);
3359
3360#define ASSIGN_REG_W(Name) \
3361 ASSIGN_REG(Name); \
3362 Name = Name.W();
3363
3364#define DEFINE_PINNED(Name, Reg) \
3365 Register Name = no_reg; \
3366 regs.Pinned(Reg, &Name);
3367
3368#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);
3369
3370#define DEFINE_SCOPED(Name) \
3371 DEFINE_REG(Name) \
3372 RegisterAllocator::Scoped scope_##Name(&regs, &Name);
3373
3374#define FREE_REG(Name) regs.Free(&Name);
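// A typical use of the allocator and the macros above (illustrative sketch
// matching the builtins further down, not additional functionality):
//
//   auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
//   DEFINE_PINNED(suspender, r3);  // bind a name to a specific register
//   DEFINE_REG(scratch);           // grab any free allocatable register
//   ...
//   FREE_REG(scratch);             // hand it back to the pool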
3375
3376// Loads the context field of the WasmTrustedInstanceData or WasmImportData
3377// depending on the data's type, and places the result in the input register.
3378void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
3379 Register scratch) {
3380 __ LoadTaggedField(scratch, FieldMemOperand(data, HeapObject::kMapOffset),
3381 r0);
3382 __ CompareInstanceType(scratch, scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);
3383 Label instance;
3384 Label end;
3385 __ beq(&instance);
3386 __ LoadTaggedField(
3387 data, FieldMemOperand(data, WasmImportData::kNativeContextOffset), r0);
3388 __ jmp(&end);
3389 __ bind(&instance);
3390 __ LoadTaggedField(
3391 data,
3392 FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset), r0);
3393 __ bind(&end);
3394}
3395
3396} // namespace
3397
3398void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3399 // Push registers in reverse order so that they are on the stack like
3400 // in an array, with the first item being at the lowest address.
3401 DoubleRegList fp_regs;
3402 for (DoubleRegister fp_param_reg : wasm::kFpParamRegisters) {
3403 fp_regs.set(fp_param_reg);
3404 }
3405 __ MultiPushDoubles(fp_regs);
3406
3407 // Push the GP registers in reverse order so that they are on the stack like
3408 // in an array, with the first item being at the lowest address.
3409 RegList gp_regs;
3410 for (size_t i = arraysize(wasm::kGpParamRegisters) - 1; i > 0; --i) {
3411 gp_regs.set(wasm::kGpParamRegisters[i]);
3412 }
3413 __ MultiPush(gp_regs);
3414 // Reserve a slot for the signature.
3415 __ Push(r3);
3416 __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
3417}
3418
3419void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
3420 __ Trap();
3421}
3422
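// Generate_WasmSuspend: saves the current stack's jump buffer and marks the
// stack Suspended, makes the parent stack and the parent suspender the active
// ones, and then jumps to the caller's jump buffer, handing the suspender's
// promise back to the caller. Execution continues at the `resume` label once
// the suspender is resumed.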
3423void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3424 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3425 // Set up the stackframe.
3426 __ EnterFrame(StackFrame::STACK_SWITCH);
3427
3428 DEFINE_PINNED(suspender, r3);
3430
3431 __ SubS64(
3432 sp, sp,
3433 Operand(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize));
3434 // Set a sentinel value for the spill slots visited by the GC.
3435 ResetStackSwitchFrameStackSlots(masm);
3436
3437 // -------------------------------------------
3438 // Save current state in active jump buffer.
3439 // -------------------------------------------
3440 Label resume;
3441 DEFINE_REG(stack);
3442 __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3443 DEFINE_REG(scratch);
3444 FillJumpBuffer(masm, stack, &resume, scratch);
3445 SwitchStackState(masm, stack, scratch, wasm::JumpBuffer::Active,
3446 wasm::JumpBuffer::Suspended);
3447 regs.ResetExcept(suspender, stack);
3448
3449 DEFINE_REG(suspender_stack);
3450 __ LoadU64(suspender_stack,
3451 FieldMemOperand(suspender, WasmSuspenderObject::kStackOffset), r0);
3452 if (v8_flags.debug_code) {
3453 // -------------------------------------------
3454 // Check that the suspender's stack is the active stack.
3455 // -------------------------------------------
3456 // TODO(thibaudm): Once we add core stack-switching instructions, this
3457 // check will not hold anymore: it's possible that the active stack
3458 // changed (due to an internal switch), so we have to update the suspender.
3459 __ CmpS64(suspender_stack, stack);
3460 Label ok;
3461 __ beq(&ok);
3462 __ Trap();
3463 __ bind(&ok);
3464 }
3465 // -------------------------------------------
3466 // Update roots.
3467 // -------------------------------------------
3468 DEFINE_REG(caller);
3469 __ LoadU64(caller, MemOperand(suspender_stack, wasm::kStackParentOffset), r0);
3470 __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3471 DEFINE_REG(parent);
3472 __ LoadTaggedField(
3473 parent, FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset),
3474 r0);
3475 int32_t active_suspender_offset =
3476 MacroAssembler::RootRegisterOffsetForRootIndex(
3477 RootIndex::kActiveSuspender);
3478 __ StoreU64(parent, MemOperand(kRootRegister, active_suspender_offset));
3479 regs.ResetExcept(suspender, caller, stack);
3480
3481 // -------------------------------------------
3482 // Load jump buffer.
3483 // -------------------------------------------
3484 SwitchStacks(masm, stack, false, {caller, suspender});
3485 FREE_REG(stack);
3486 __ LoadTaggedField(
3487 kReturnRegister0,
3488 FieldMemOperand(suspender, WasmSuspenderObject::kPromiseOffset), r0);
3489 MemOperand GCScanSlotPlace =
3490 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3491 __ Zero(GCScanSlotPlace);
3492 ASSIGN_REG(scratch)
3493 LoadJumpBuffer(masm, caller, true, scratch, wasm::JumpBuffer::Inactive);
3494 if (v8_flags.debug_code) {
3495 __ Trap();
3496 }
3497 __ bind(&resume);
3498 __ LeaveFrame(StackFrame::STACK_SWITCH);
3499 __ blr();
3500}
3501
3502namespace {
3503// Resume the suspender stored in the closure. We generate two variants of this
3504// builtin: the onFulfilled variant resumes execution at the saved PC and
3505// forwards the value, the onRejected variant throws the value.
3506
3507void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
3508 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3509 __ EnterFrame(StackFrame::STACK_SWITCH);
3510
3511 DEFINE_PINNED(closure, kJSFunctionRegister); // r4
3512
3513 __ SubS64(
3514 sp, sp,
3515 Operand(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize));
3516 // Set a sentinel value for the spill slots visited by the GC.
3517 ResetStackSwitchFrameStackSlots(masm);
3518
3519 regs.ResetExcept(closure);
3520
3521 // -------------------------------------------
3522 // Load suspender from closure.
3523 // -------------------------------------------
3524 DEFINE_REG(sfi);
3525 __ LoadTaggedField(
3526 sfi,
3527 MemOperand(
3528 closure,
3529 wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction()),
3530 r0);
3531 FREE_REG(closure);
3532 // The suspender must live in the write barrier's ObjectRegister so that it
3533 // can be used in the RecordWriteField calls later.
3534 DEFINE_PINNED(suspender, WriteBarrierDescriptor::ObjectRegister());
3535 DEFINE_REG(resume_data);
3536 __ LoadTaggedField(
3537 resume_data,
3538 FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset),
3539 r0);
3540 __ LoadTaggedField(
3541 suspender, FieldMemOperand(resume_data, WasmResumeData::kSuspenderOffset),
3542 r0);
3543 regs.ResetExcept(suspender);
3544
3545 // -------------------------------------------
3546 // Save current state.
3547 // -------------------------------------------
3548 Label suspend;
3549 DEFINE_REG(active_stack);
3550 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3551 DEFINE_REG(scratch);
3552 FillJumpBuffer(masm, active_stack, &suspend, scratch);
3553 SwitchStackState(masm, active_stack, scratch, wasm::JumpBuffer::Active,
3554 wasm::JumpBuffer::Inactive);
3555
3556 // -------------------------------------------
3557 // Set the suspender and stack parents and update the roots
3558 // -------------------------------------------
3559 DEFINE_REG(active_suspender);
3560 __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
3561 __ StoreTaggedField(
3562 active_suspender,
3563 FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset), r0);
3564 __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
3565 active_suspender, ip, kLRHasBeenSaved,
3566 SaveFPRegsMode::kIgnore);
3567 int32_t active_suspender_offset =
3568 MacroAssembler::RootRegisterOffsetForRootIndex(
3569 RootIndex::kActiveSuspender);
3570 __ StoreU64(suspender, MemOperand(kRootRegister, active_suspender_offset));
3571
3572 // The next line loads a field from suspender, but the same register is
3573 // needed for target_stack so that it can be used in RecordWriteField.
3574 // Free suspender here to reuse its pinned register, and load from it below.
3575 FREE_REG(suspender);
3576 DEFINE_REG(target_stack);
3577 suspender = target_stack;
3578 __ LoadU64(target_stack,
3579 FieldMemOperand(suspender, WasmSuspenderObject::kStackOffset), r0);
3580 suspender = no_reg;
3581
3582 __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
3583 SwitchStacks(masm, active_stack, false, {target_stack});
3584 regs.ResetExcept(target_stack);
3585
3586 // -------------------------------------------
3587 // Load state from target jmpbuf (longjmp).
3588 // -------------------------------------------
3589 regs.Reserve(kReturnRegister0);
3590 ASSIGN_REG(scratch);
3591 // Move resolved value to return register.
3593 MemOperand GCScanSlotPlace =
3594 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3595 __ Zero(GCScanSlotPlace);
3596 if (on_resume == wasm::OnResume::kThrow) {
3597 // Switch without restoring the PC.
3598 LoadJumpBuffer(masm, target_stack, false, scratch,
3599 wasm::JumpBuffer::Suspended);
3600 // Pop this frame now. The unwinder expects that the first STACK_SWITCH
3601 // frame is the outermost one.
3602 __ LeaveFrame(StackFrame::STACK_SWITCH);
3603 // Forward the onRejected value to kThrow.
3604 __ Push(kReturnRegister0);
3605 __ CallRuntime(Runtime::kThrow);
3606 } else {
3607 // Resume the stack normally.
3608 LoadJumpBuffer(masm, target_stack, true, scratch,
3609 wasm::JumpBuffer::Suspended);
3610 }
3611 if (v8_flags.debug_code) {
3612 __ Trap();
3613 }
3614 __ bind(&suspend);
3615 __ LeaveFrame(StackFrame::STACK_SWITCH);
3616 // Pop receiver + parameter.
3617 __ AddS64(sp, sp, Operand(2 * kSystemPointerSize));
3618 __ blr();
3619}
3620} // namespace
3621
3622void Builtins::Generate_WasmResume(MacroAssembler* masm) {
3623 Generate_WasmResumeHelper(masm, wasm::OnResume::kContinue);
3624}
3625
3626void Builtins::Generate_WasmReject(MacroAssembler* masm) {
3627 Generate_WasmResumeHelper(masm, wasm::OnResume::kThrow);
3628}
3629
3630void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
3631 // Only needed on x64.
3632 __ Trap();
3633}
3634
3635namespace {
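// SwitchToAllocatedStack moves the JS-to-wasm wrapper from the caller's stack
// onto the active wasm stack: it saves the parent stack's jump buffer,
// switches stacks, opens a STACK_SWITCH frame, and copies the return-handling
// fields of the wrapper buffer into a new buffer allocated on that stack.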
3636void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
3637 Register wasm_instance, Register wrapper_buffer,
3638 Register& original_fp, Register& new_wrapper_buffer,
3639 Label* suspend) {
3640 ResetStackSwitchFrameStackSlots(masm);
3641 DEFINE_SCOPED(scratch)
3642 DEFINE_REG(target_stack)
3643 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3644 DEFINE_REG(parent_stack)
3645 __ LoadU64(parent_stack, MemOperand(target_stack, wasm::kStackParentOffset),
3646 r0);
3647
3648 FillJumpBuffer(masm, parent_stack, suspend, scratch);
3649 SwitchStacks(masm, parent_stack, false, {wasm_instance, wrapper_buffer});
3650
3651 FREE_REG(parent_stack);
3652 // Save the old stack's fp in r15, and use it to access the parameters in
3653 // the parent frame.
3654 regs.Pinned(r15, &original_fp);
3655 __ Move(original_fp, fp);
3656 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3657 LoadTargetJumpBuffer(masm, target_stack, scratch,
3658 wasm::JumpBuffer::Suspended);
3659 FREE_REG(target_stack);
3660
3661 // Push the loaded fp. We know it is null, because there is no frame yet,
3662 // so we could also push 0 directly. In any case we need to push it,
3663 // because this marks the base of the stack segment for
3664 // the stack frame iterator.
3665 __ EnterFrame(StackFrame::STACK_SWITCH);
3666
3667 int stack_space =
3668 RoundUp(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize +
3669 JSToWasmWrapperFrameConstants::kWrapperBufferSize,
3670 16);
3671 __ SubS64(sp, sp, Operand(stack_space));
3672 __ EnforceStackAlignment();
3673
3674 ASSIGN_REG(new_wrapper_buffer)
3675
3676 __ Move(new_wrapper_buffer, sp);
3677 // Copy data needed for return handling from old wrapper buffer to new one.
3678 // kWrapperBufferRefReturnCount will be copied too, because 8 bytes are copied
3679 // at the same time.
3680 static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
3681 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
3682
3683 __ LoadU64(
3684 scratch,
3685 MemOperand(wrapper_buffer,
3686 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3687 __ StoreU64(
3688 scratch,
3689 MemOperand(new_wrapper_buffer,
3690 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3691 __ LoadU64(
3692 scratch,
3693 MemOperand(
3694 wrapper_buffer,
3695 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3696 __ StoreU64(
3697 scratch,
3698 MemOperand(
3699 new_wrapper_buffer,
3700 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3701}
3702
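// SwitchBackAndReturnPromise runs on the normal return path of the
// stack-switching wrapper: it reloads the parent stack, restores the parent
// suspender, and, in kPromise mode, fulfills the suspender's promise with the
// wasm return value before the common frame teardown.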
3703void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
3704 wasm::Promise mode, Label* return_promise) {
3705 regs.ResetExcept();
3706 // The return value of the wasm function becomes the parameter of the
3707 // FulfillPromise builtin, and the promise is the return value of this
3708 // wrapper.
3709 static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3710 DEFINE_PINNED(promise, desc.GetRegisterParameter(0));
3711 DEFINE_PINNED(return_value, desc.GetRegisterParameter(1));
3712 DEFINE_SCOPED(tmp);
3713 DEFINE_SCOPED(tmp2);
3714 DEFINE_SCOPED(tmp3);
3715 if (mode == wasm::kPromise) {
3716 __ Move(return_value, kReturnRegister0);
3717 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3718 __ LoadTaggedField(
3719 promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset),
3720 r0);
3721 }
3722
3723 __ LoadU64(kContextRegister,
3724 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3725 GetContextFromImplicitArg(masm, kContextRegister, tmp);
3726
3727 ReloadParentStack(masm, promise, return_value, kContextRegister, tmp, tmp2,
3728 tmp3);
3729 RestoreParentSuspender(masm, tmp);
3730
3731 if (mode == wasm::kPromise) {
3732 __ mov(tmp, Operand(1));
3733 __ StoreU64(
3734 tmp, MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3735 __ Push(promise);
3736 __ CallBuiltin(Builtin::kFulfillPromise);
3737 __ Pop(promise);
3738 }
3739 FREE_REG(promise);
3740 FREE_REG(return_value);
3741 __ bind(return_promise);
3742}
3743
3744void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3745 RegisterAllocator& regs,
3746 Label* return_promise) {
3747 regs.ResetExcept();
3748 static const Builtin_RejectPromise_InterfaceDescriptor desc;
3749 DEFINE_PINNED(promise, desc.GetRegisterParameter(0));
3750 DEFINE_PINNED(reason, desc.GetRegisterParameter(1));
3751 DEFINE_PINNED(debug_event, desc.GetRegisterParameter(2));
3752 int catch_handler = __ pc_offset();
3753
3754 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3755 thread_in_wasm_flag_addr = r5;
3756
3757 // Unset thread_in_wasm_flag.
3758 __ LoadU64(
3759 thread_in_wasm_flag_addr,
3760 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3761 __ mov(r0, Operand(0));
3762 __ StoreU32(r0, MemOperand(thread_in_wasm_flag_addr, 0), no_reg);
3763
3764 // The exception becomes the parameter of the RejectPromise builtin, and the
3765 // promise is the return value of this wrapper.
3766 __ Move(reason, kReturnRegister0);
3767 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3768 __ LoadTaggedField(
3769 promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset),
3770 r0);
3771
3772 DEFINE_SCOPED(tmp);
3773 DEFINE_SCOPED(tmp2);
3774 DEFINE_SCOPED(tmp3);
3775 __ LoadU64(kContextRegister,
3776 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3777 GetContextFromImplicitArg(masm, kContextRegister, tmp);
3778 ReloadParentStack(masm, promise, reason, kContextRegister, tmp, tmp2, tmp3);
3779 RestoreParentSuspender(masm, tmp);
3780
3781 __ mov(tmp, Operand(1));
3782 __ StoreU64(
3783 tmp, MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3784 __ Push(promise);
3785 __ LoadRoot(debug_event, RootIndex::kTrueValue);
3786 __ CallBuiltin(Builtin::kRejectPromise);
3787 __ Pop(promise);
3788
3789 // Run the rest of the wrapper normally (deconstruct the frame, ...).
3790 __ b(return_promise);
3791
3792 masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
3793}
3794
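// JSToWasmWrapperHelper emits the JS-to-wasm wrapper for all three modes:
// kNoPromise (plain call), kPromise (suspendable call that returns a promise
// on a separate wasm stack), and kStressSwitch (always switches stacks; used
// by the stress-switching variant). It copies the parameters from the wrapper
// buffer into the wasm parameter registers and stack slots, performs the call,
// and stores the results back for Builtin::kJSToWasmHandleReturns.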
3795void JSToWasmWrapperHelper(MacroAssembler* masm, wasm::Promise mode) {
3796 bool stack_switch = mode == wasm::kPromise || mode == wasm::kStressSwitch;
3797 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3798
3799 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3800 : StackFrame::JS_TO_WASM);
3801
3802 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3803 kSystemPointerSize);
3804
3805 // Load the implicit argument (instance data or import data) from the frame.
3806 DEFINE_PINNED(implicit_arg, kWasmImplicitArgRegister);
3807 __ LoadU64(implicit_arg,
3808 MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3809
3810 DEFINE_PINNED(wrapper_buffer,
3811 WasmJSToWasmWrapperDescriptor::WrapperBufferRegister());
3812
3813 Label suspend;
3814 Register original_fp = no_reg;
3815 Register new_wrapper_buffer = no_reg;
3816 if (stack_switch) {
3817 SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
3818 original_fp, new_wrapper_buffer, &suspend);
3819 } else {
3820 original_fp = fp;
3821 new_wrapper_buffer = wrapper_buffer;
3822 }
3823
3824 regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
3825 new_wrapper_buffer);
3826
3827 {
3828 __ StoreU64(
3829 new_wrapper_buffer,
3830 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3831 if (stack_switch) {
3832 __ StoreU64(
3833 implicit_arg,
3834 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3835 DEFINE_SCOPED(scratch)
3836 __ LoadU64(
3837 scratch,
3838 MemOperand(original_fp,
3839 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3840 __ StoreU64(
3841 scratch,
3842 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3843 }
3844 }
3845 {
3846 DEFINE_SCOPED(result_size);
3847 __ LoadU64(
3848 result_size,
3849 MemOperand(wrapper_buffer, JSToWasmWrapperFrameConstants::
3850 kWrapperBufferStackReturnBufferSize));
3851 __ ShiftLeftU64(r0, result_size, Operand(kSystemPointerSizeLog2));
3852 __ SubS64(sp, sp, r0);
3853 }
3854
3855 __ StoreU64(
3856 sp,
3857 MemOperand(
3858 new_wrapper_buffer,
3859 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart));
3860
3861 if (stack_switch) {
3862 FREE_REG(new_wrapper_buffer)
3863 }
3864 FREE_REG(implicit_arg)
3865 for (auto reg : wasm::kGpParamRegisters) {
3866 regs.Reserve(reg);
3867 }
3868
3869 // The first GP parameter holds the trusted instance data or the import data.
3870 // This is handled specially.
3871 int stack_params_offset =
3872 (arraysize(wasm::kGpParamRegisters) - 1) * kSystemPointerSize +
3873 arraysize(wasm::kFpParamRegisters) * kDoubleSize;
3874
3875 {
3876 DEFINE_SCOPED(params_start);
3877 __ LoadU64(
3878 params_start,
3879 MemOperand(wrapper_buffer,
3880 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3881 {
3882 // Push stack parameters on the stack.
3883 DEFINE_SCOPED(params_end);
3884 __ LoadU64(
3885 params_end,
3886 MemOperand(wrapper_buffer,
3887 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3888 DEFINE_SCOPED(last_stack_param);
3889
3890 __ AddS64(last_stack_param, params_start, Operand(stack_params_offset));
3891 Label loop_start;
3892 __ bind(&loop_start);
3893
3894 Label finish_stack_params;
3895 __ CmpS64(last_stack_param, params_end);
3896 __ bge(&finish_stack_params);
3897
3898 // Push parameter
3899 {
3900 __ AddS64(params_end, params_end, Operand(-kSystemPointerSize));
3901 __ LoadU64(r0, MemOperand(params_end), r0);
3902 __ push(r0);
3903 }
3904 __ jmp(&loop_start);
3905
3906 __ bind(&finish_stack_params);
3907 }
3908
3909 size_t next_offset = 0;
3910 for (size_t i = 1; i < arraysize(wasm::kGpParamRegisters); i++) {
3911 // Check that {params_start} does not overlap with any of the parameter
3912 // registers, so that we don't overwrite it by accident with the loads
3913 // below.
3914 DCHECK_NE(params_start, wasm::kGpParamRegisters[i]);
3915 __ LoadU64(wasm::kGpParamRegisters[i],
3916 MemOperand(params_start, next_offset));
3917 next_offset += kSystemPointerSize;
3918 }
3919
3920 for (size_t i = 0; i < arraysize(wasm::kFpParamRegisters); i++) {
3921 __ LoadF64(wasm::kFpParamRegisters[i],
3922 MemOperand(params_start, next_offset));
3923 next_offset += kDoubleSize;
3924 }
3925 DCHECK_EQ(next_offset, stack_params_offset);
3926 }
3927
3928 {
3929 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3930 __ LoadU64(thread_in_wasm_flag_addr,
3931 MemOperand(kRootRegister,
3932 Isolate::thread_in_wasm_flag_address_offset()));
3933 DEFINE_SCOPED(scratch);
3934 __ mov(scratch, Operand(1));
3935 __ StoreU32(scratch, MemOperand(thread_in_wasm_flag_addr, 0), no_reg);
3936 }
3937
3938 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3939 {
3940 DEFINE_SCOPED(call_target);
3941 __ LoadWasmCodePointer(
3942 call_target,
3943 MemOperand(wrapper_buffer,
3944 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3945 __ CallWasmCodePointer(call_target);
3946 }
3947
3948 regs.ResetExcept();
3949 // The wrapper_buffer has to be in r5 as the correct parameter register.
3950 regs.Reserve(kReturnRegister0, kReturnRegister1);
3951 ASSIGN_PINNED(wrapper_buffer, r5);
3952 {
3953 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3954 __ LoadU64(thread_in_wasm_flag_addr,
3955 MemOperand(kRootRegister,
3956 Isolate::thread_in_wasm_flag_address_offset()));
3957 __ mov(r0, Operand(0));
3958 __ StoreU32(r0, MemOperand(thread_in_wasm_flag_addr, 0), no_reg);
3959 }
3960
3961 __ LoadU64(
3962 wrapper_buffer,
3963 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3964
3965 __ StoreF64(
3966 wasm::kFpReturnRegisters[0],
3967 MemOperand(
3968 wrapper_buffer,
3969 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1));
3970 __ StoreF64(
3971 wasm::kFpReturnRegisters[1],
3972 MemOperand(
3973 wrapper_buffer,
3974 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2));
3975 __ StoreU64(
3976 wasm::kGpReturnRegisters[0],
3977 MemOperand(
3978 wrapper_buffer,
3979 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1));
3980 __ StoreU64(
3981 wasm::kGpReturnRegisters[1],
3982 MemOperand(
3983 wrapper_buffer,
3984 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2));
3985 // Call the return value builtin with
3986 // r3: wasm instance.
3987 // r4: the result JSArray for multi-return.
3988 // r5: pointer to the byte buffer which contains all parameters.
3989 if (stack_switch) {
3990 __ LoadU64(r4,
3991 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3992 __ LoadU64(r3,
3993 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3994 } else {
3995 __ LoadU64(
3996 r4,
3997 MemOperand(fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3998 __ LoadU64(
3999 r3, MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
4000 }
4001 Register scratch = r6;
4002 GetContextFromImplicitArg(masm, r3, scratch);
4003
4004 __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
4005
4006 Label return_promise;
4007 if (stack_switch) {
4008 SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);
4009 }
4010 __ bind(&suspend);
4011
4012 __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
4013 : StackFrame::JS_TO_WASM);
4014 // Despite returning to a different location for the regular and the
4015 // stack-switching versions, the incoming argument count matches in both
4016 // cases: instance and result array without suspend, or promise
4017 // resolve/reject params for the callback.
4018 __ AddS64(sp, sp, Operand(2 * kSystemPointerSize));
4019 __ blr();
4020
4021 // Catch handler for the stack-switching wrapper: reject the promise with the
4022 // thrown exception.
4023 if (mode == wasm::kPromise) {
4024 GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
4025 }
4026}
4027} // namespace
4028
4029void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
4030 JSToWasmWrapperHelper(masm, wasm::kNoPromise);
4031}
4032
4033void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
4034 JSToWasmWrapperHelper(masm, wasm::kPromise);
4035}
4036
4037void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
4038 JSToWasmWrapperHelper(masm, wasm::kStressSwitch);
4039}
4040
4041namespace {
4042
4043static constexpr Register kOldSPRegister = r16;
4044
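// When C++ code is entered while running on a secondary (wasm) stack, the
// helpers below first switch to the isolate's central stack. kOldSPRegister
// doubles as a flag: it stays zero if no switch was needed, and otherwise
// holds the old sp so SwitchFromTheCentralStackIfNeeded can undo the switch.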
4045void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
4046 Register target_input,
4047 Register argv_input) {
4048 using ER = ExternalReference;
4049
4050 // kOldSPRegister is used as a switch flag: if it is zero, no switch was
4051 // performed; if it is non-zero, it contains the old sp value.
4052 __ mov(kOldSPRegister, Operand(0));
4053
4054 // Using r5 & r6 as temporary registers, because they will be rewritten
4055 // before exiting to native code anyway.
4056
4057 ER on_central_stack_flag_loc = ER::Create(
4058 IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
4059 __ Move(ip, on_central_stack_flag_loc);
4060 __ LoadU8(ip, MemOperand(ip));
4061
4062 Label do_not_need_to_switch;
4063 __ CmpU32(ip, Operand(0), r0);
4064 __ bne(&do_not_need_to_switch);
4065
4066 // Switch to central stack.
4067
4068 __ Move(kOldSPRegister, sp);
4069
4070 Register central_stack_sp = r5;
4071 DCHECK(!AreAliased(central_stack_sp, argc_input, argv_input, target_input));
4072 {
4073 __ Push(argc_input);
4074 __ Push(target_input);
4075 __ Push(argv_input);
4076 __ PrepareCallCFunction(2, r0);
4077 __ Move(kCArgRegs[0], ER::isolate_address());
4078 __ Move(kCArgRegs[1], kOldSPRegister);
4079 __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
4080 SetIsolateDataSlots::kNo);
4081 __ Move(central_stack_sp, kReturnRegister0);
4082 __ Pop(argv_input);
4083 __ Pop(target_input);
4084 __ Pop(argc_input);
4085 }
4086
4087 static constexpr int kReturnAddressSlotOffset = 1 * kSystemPointerSize;
4088 static constexpr int kPadding = 1 * kSystemPointerSize;
4089 __ SubS64(sp, central_stack_sp, Operand(kReturnAddressSlotOffset + kPadding));
4090 __ EnforceStackAlignment();
4091
4092 // Update the sp saved in the frame.
4093 // It will be used to calculate the callee pc during GC.
4094 // The pc is going to be on the new stack segment, so rewrite it here.
4095 __ AddS64(central_stack_sp, sp,
4097 __ StoreU64(central_stack_sp, MemOperand(fp, ExitFrameConstants::kSPOffset));
4098
4099 __ bind(&do_not_need_to_switch);
4100}
4101
4102void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
4103 using ER = ExternalReference;
4104
4105 Label no_stack_change;
4106
4107 __ CmpU64(kOldSPRegister, Operand(0), r0);
4108 __ beq(&no_stack_change);
4109 __ Move(sp, kOldSPRegister);
4110
4111 {
4113 __ PrepareCallCFunction(1, r0);
4114 __ Move(kCArgRegs[0], ER::isolate_address());
4115 __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
4118 }
4119
4120 __ bind(&no_stack_change);
4121}
4122
4123} // namespace
4124
4125#endif // V8_ENABLE_WEBASSEMBLY
4126
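// Generate_CEntry emits the JavaScript-to-C++ transition used for runtime and
// builtin calls: it builds an exit frame, calls the target C function with the
// argument vector and the isolate, and either returns the result to the JS
// caller or, if the exception sentinel comes back, unwinds to the pending
// exception handler.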
4127void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
4128 ArgvMode argv_mode, bool builtin_exit_frame,
4129 bool switch_to_central_stack) {
4130 // Called from JavaScript; parameters are on stack as if calling JS function.
4131 // r3: number of arguments including receiver
4132 // r4: pointer to builtin function
4133 // fp: frame pointer (restored after C call)
4134 // sp: stack pointer (restored as callee's sp after C call)
4135 // cp: current context (C callee-saved)
4136 //
4137 // If argv_mode == ArgvMode::kRegister:
4138 // r5: pointer to the first argument
4139
4140 using ER = ExternalReference;
4141
4142 // Move input arguments to more convenient registers.
4143 static constexpr Register argc_input = r3;
4144 static constexpr Register target_fun = r15; // C callee-saved
4145 static constexpr Register argv = r4;
4146 static constexpr Register scratch = ip;
4147 static constexpr Register argc_sav = r14; // C callee-saved
4148
4149 __ mr(target_fun, argv);
4150
4151 if (argv_mode == ArgvMode::kRegister) {
4152 // Move argv into the correct register.
4153 __ mr(argv, r5);
4154 } else {
4155 // Compute the argv pointer.
4156 __ ShiftLeftU64(argv, argc_input, Operand(kSystemPointerSizeLog2));
4157 __ add(argv, argv, sp);
4158 __ subi(argv, argv, Operand(kSystemPointerSize));
4159 }
4160
4161 // Enter the exit frame that transitions from JavaScript to C++.
4162 FrameScope scope(masm, StackFrame::MANUAL);
4163
4164 int arg_stack_space = 0;
4165
4166 // Pass buffer for return value on stack if necessary
4167 bool needs_return_buffer =
4168 (result_size == 2 && !ABI_RETURNS_OBJECT_PAIRS_IN_REGS);
4169 if (needs_return_buffer) {
4170 arg_stack_space += result_size;
4171 }
4172
4173 __ EnterExitFrame(
4174 scratch, arg_stack_space,
4175 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
4176
4177 // Store a copy of argc in callee-saved registers for later.
4178 __ mr(argc_sav, argc_input);
4179
4180 // r3: number of arguments including receiver
4181 // r14: number of arguments including receiver (C callee-saved)
4182 // r4: pointer to the first argument
4183 // r15: pointer to builtin function (C callee-saved)
4184
4185 // Result returned in registers or stack, depending on result size and ABI.
4186
4187 Register isolate_reg = r5;
4188 if (needs_return_buffer) {
4189 // The return value is a non-scalar value.
4190 // Use frame storage reserved by calling function to pass return
4191 // buffer as implicit first argument.
4192 __ mr(r5, r4);
4193 __ mr(r4, r3);
4194 __ addi(r3, sp,
4196 isolate_reg = r6;
4197 }
4198
4199#if V8_ENABLE_WEBASSEMBLY
4200 if (switch_to_central_stack) {
4201 SwitchToTheCentralStackIfNeeded(masm, argc_input, target_fun, argv);
4202 }
4203#endif // V8_ENABLE_WEBASSEMBLY
4204
4205 // Call C built-in.
4206 __ Move(isolate_reg, ER::isolate_address());
4207 __ StoreReturnAddressAndCall(target_fun);
4208
4209 // If return value is on the stack, pop it to registers.
4210 if (needs_return_buffer) {
4211 __ LoadU64(r4, MemOperand(r3, kSystemPointerSize));
4212 __ LoadU64(r3, MemOperand(r3));
4213 }
4214
4215 // Check result for exception sentinel.
4216 Label exception_returned;
4217 __ CompareRoot(r3, RootIndex::kException);
4218 __ beq(&exception_returned);
4219
4220#if V8_ENABLE_WEBASSEMBLY
4221 if (switch_to_central_stack) {
4222 SwitchFromTheCentralStackIfNeeded(masm);
4223 }
4224#endif // V8_ENABLE_WEBASSEMBLY
4225
4226 // Check that there is no exception, otherwise we
4227 // should have returned the exception sentinel.
4228 if (v8_flags.debug_code) {
4229 Label okay;
4230 ER exception_address =
4231 ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4232 __ LoadU64(scratch,
4233 __ ExternalReferenceAsOperand(exception_address, no_reg));
4234 __ LoadRoot(r0, RootIndex::kTheHoleValue);
4235 __ CompareTagged(r0, scratch);
4236 // Cannot use check here as it attempts to generate call into runtime.
4237 __ beq(&okay);
4238 __ stop();
4239 __ bind(&okay);
4240 }
4241
4242 // Exit C frame and return.
4243 // r3:r4: result
4244 // sp: stack pointer
4245 // fp: frame pointer
4246 // r14: still holds argc (C caller-saved).
4247 __ LeaveExitFrame(scratch);
4248 if (argv_mode == ArgvMode::kStack) {
4249 DCHECK(!AreAliased(scratch, argc_sav));
4250 __ ShiftLeftU64(scratch, argc_sav, Operand(kSystemPointerSizeLog2));
4251 __ AddS64(sp, sp, scratch);
4252 }
4253
4254 __ blr();
4255
4256 // Handling of exception.
4257 __ bind(&exception_returned);
4258
4259 ER pending_handler_context_address = ER::Create(
4260 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4261 ER pending_handler_entrypoint_address = ER::Create(
4262 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4263 ER pending_handler_constant_pool_address = ER::Create(
4264 IsolateAddressId::kPendingHandlerConstantPoolAddress, masm->isolate());
4265 ER pending_handler_fp_address =
4266 ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4267 ER pending_handler_sp_address =
4268 ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4269
4270 // Ask the runtime for help to determine the handler. This will set r3 to
4271 // contain the current exception, don't clobber it.
4272 {
4273 FrameScope scope(masm, StackFrame::MANUAL);
4274 __ PrepareCallCFunction(3, 0, r3);
4275 __ li(kCArgRegs[0], Operand::Zero());
4276 __ li(kCArgRegs[1], Operand::Zero());
4277 __ Move(kCArgRegs[2], ER::isolate_address());
4278 __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3,
4279 SetIsolateDataSlots::kNo);
4280 }
4281
4282 // Retrieve the handler context, SP and FP.
4283 __ Move(cp, pending_handler_context_address);
4284 __ LoadU64(cp, MemOperand(cp));
4285 __ Move(sp, pending_handler_sp_address);
4286 __ LoadU64(sp, MemOperand(sp));
4287 __ Move(fp, pending_handler_fp_address);
4288 __ LoadU64(fp, MemOperand(fp));
4289
4290 // If the handler is a JS frame, restore the context to the frame. Note that
4291 // the context will be set to (cp == 0) for non-JS frames.
4292 Label skip;
4293 __ cmpi(cp, Operand::Zero());
4294 __ beq(&skip);
4295 __ StoreU64(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4296 __ bind(&skip);
4297
4298 // Clear c_entry_fp, like we do in `LeaveExitFrame`.
4299 ER c_entry_fp_address =
4300 ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4301 __ mov(scratch, Operand::Zero());
4302 __ StoreU64(scratch,
4303 __ ExternalReferenceAsOperand(c_entry_fp_address, no_reg));
4304
4305 // Compute the handler entry address and jump to it.
4306 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
4307 __ LoadU64(
4308 scratch,
4309 __ ExternalReferenceAsOperand(pending_handler_entrypoint_address, no_reg),
4310 r0);
4311 if (V8_EMBEDDED_CONSTANT_POOL_BOOL) {
4312 __ Move(kConstantPoolRegister, pending_handler_constant_pool_address);
4313 __ LoadU64(kConstantPoolRegister, MemOperand(kConstantPoolRegister));
4314 }
4315 __ Jump(scratch);
4316}
4317
4318#if V8_ENABLE_WEBASSEMBLY
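// Generate_WasmHandleStackOverflow first tries to grow the wasm stack via
// wasm_grow_stack(); on success it rebases fp and sp onto the new segment and
// returns, otherwise it falls back to Runtime::kWasmStackGuard.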
4319void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4320 using ER = ExternalReference;
4321 Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
4322 Register gap = WasmHandleStackOverflowDescriptor::GapRegister();
4323 {
4324 DCHECK_NE(kCArgRegs[1], frame_base);
4325 DCHECK_NE(kCArgRegs[3], frame_base);
4326 __ mr(kCArgRegs[3], gap);
4327 __ mr(kCArgRegs[1], sp);
4328 __ SubS64(kCArgRegs[2], frame_base, kCArgRegs[1]);
4329 __ mr(kCArgRegs[4], fp);
4330 FrameScope scope(masm, StackFrame::INTERNAL);
4331 __ push(kCArgRegs[3]);
4332 __ PrepareCallCFunction(5, r0);
4333 __ Move(kCArgRegs[0], ER::isolate_address());
4334 __ CallCFunction(ER::wasm_grow_stack(), 5);
4335 __ pop(gap);
4337 }
4338 Label call_runtime;
4339 // wasm_grow_stack returns zero if it cannot grow a stack.
4340 __ CmpU64(kReturnRegister0, Operand(0), r0);
4341 __ beq(&call_runtime);
4342
4343 // Calculate old FP - SP offset to adjust FP accordingly to new SP.
4344 __ SubS64(fp, fp, sp);
4345 __ AddS64(fp, fp, kReturnRegister0);
4346 __ mr(sp, kReturnRegister0);
4347 {
4348 UseScratchRegisterScope temps(masm);
4349 Register scratch = temps.Acquire();
4350 __ mov(scratch,
4351 Operand(StackFrame::TypeToMarker(StackFrame::WASM_SEGMENT_START)));
4352 __ StoreU64(scratch, MemOperand(fp, TypedFrameConstants::kFrameTypeOffset));
4353 }
4354 __ Ret();
4355
4356 __ bind(&call_runtime);
4357 // If wasm_grow_stack returns zero, the interruption or stack overflow
4358 // should be handled by a runtime call.
4359 {
4360 __ LoadU64(kWasmImplicitArgRegister,
4361 MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
4362 __ LoadTaggedField(
4363 cp,
4364 FieldMemOperand(kWasmImplicitArgRegister,
4365 WasmTrustedInstanceData::kNativeContextOffset),
4366 r0);
4367 FrameScope scope(masm, StackFrame::MANUAL);
4368 __ EnterFrame(StackFrame::INTERNAL);
4369 __ SmiTag(gap);
4370 __ push(gap);
4371 __ CallRuntime(Runtime::kWasmStackGuard);
4372 __ LeaveFrame(StackFrame::INTERNAL);
4373 __ Ret();
4374 }
4375}
4376#endif // V8_ENABLE_WEBASSEMBLY
4377
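// Generate_DoubleToI truncates the double spilled at the top of the stack to
// an int32 with modulo-2^32 wraparound. The fast path uses the hardware
// double->int64 conversion and succeeds when the result fits in 32 bits; the
// slow path rebuilds the low 32 bits from the raw exponent and mantissa.
// Example: 2^31 + 1 has exponent 31, takes the slow path, and produces the
// bit pattern 0x80000001, i.e. -2147483647 after wraparound.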
4378void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
4379 Label out_of_range, only_low, negate, done, fastpath_done;
4380 Register result_reg = r3;
4381
4382 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
4383
4384 // Immediate values for this stub fit in instructions, so it's safe to use ip.
4385 Register scratch = GetRegisterThatIsNotOneOf(result_reg);
4386 Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
4387 Register scratch_high =
4388 GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
4389 DoubleRegister double_scratch = kScratchDoubleReg;
4390
4391 __ Push(result_reg, scratch);
4392 // Account for saved regs.
4393 int argument_offset = 2 * kSystemPointerSize;
4394
4395 // Load double input.
4396 __ lfd(double_scratch, MemOperand(sp, argument_offset));
4397
4398 // Do fast-path convert from double to int.
4399 __ ConvertDoubleToInt64(double_scratch,
4400 result_reg, d0);
4401
4402 // Test for overflow.
4403 __ TestIfInt32(result_reg, r0);
4404 __ beq(&fastpath_done);
4405
4406 __ Push(scratch_high, scratch_low);
4407 // Account for saved regs.
4408 argument_offset += 2 * kSystemPointerSize;
4409
4410 __ lwz(scratch_high,
4411 MemOperand(sp, argument_offset + Register::kExponentOffset));
4412 __ lwz(scratch_low,
4413 MemOperand(sp, argument_offset + Register::kMantissaOffset));
4414
4415 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
4416 // Load scratch with exponent - 1. This is faster than loading
4417 // with exponent because Bias + 1 = 1024 which is a *PPC* immediate value.
4418 static_assert(HeapNumber::kExponentBias + 1 == 1024);
4419 __ subi(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
4420 // If exponent is greater than or equal to 84, the 32 less significant
4421 // bits are 0s (2^84 = 1, 52 significant bits, 32 uncoded bits),
4422 // the result is 0.
4423 // Compare exponent with 84 (compare exponent - 1 with 83).
4424 __ cmpi(scratch, Operand(83));
4425 __ bge(&out_of_range);
4426
4427 // If we reach this code, 31 <= exponent <= 83.
4428 // So, we don't have to handle cases where 0 <= exponent <= 20 for
4429 // which we would need to shift right the high part of the mantissa.
4430 // Scratch contains exponent - 1.
4431 // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
4432 __ subfic(scratch, scratch, Operand(51));
4433 __ cmpi(scratch, Operand::Zero());
4434 __ ble(&only_low);
4435 // 21 <= exponent <= 51, shift scratch_low and scratch_high
4436 // to generate the result.
4437 __ srw(scratch_low, scratch_low, scratch);
4438 // Scratch contains: 52 - exponent.
4439 // We need: exponent - 20.
4440 // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
4441 __ subfic(scratch, scratch, Operand(32));
4442 __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
4443 // Set the implicit 1 before the mantissa part in scratch_high.
4444 static_assert(HeapNumber::kMantissaBitsInTopWord >= 16);
4445 __ oris(result_reg, result_reg,
4446 Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
4447 __ ShiftLeftU32(r0, result_reg, scratch);
4448 __ orx(result_reg, scratch_low, r0);
4449 __ b(&negate);
4450
4451 __ bind(&out_of_range);
4452 __ mov(result_reg, Operand::Zero());
4453 __ b(&done);
4454
4455 __ bind(&only_low);
4456 // 52 <= exponent <= 83, shift only scratch_low.
4457 // On entry, scratch contains: 52 - exponent.
4458 __ neg(scratch, scratch);
4459 __ ShiftLeftU32(result_reg, scratch_low, scratch);
4460
4461 __ bind(&negate);
4462 // If input was positive, scratch_high ASR 31 equals 0 and
4463 // scratch_high LSR 31 equals zero.
4464 // New result = (result eor 0) + 0 = result.
4465 // If the input was negative, we have to negate the result.
4466 // Input_high ASR 31 equals 0xFFFFFFFF and scratch_high LSR 31 equals 1.
4467 // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
4468 __ srawi(r0, scratch_high, 31);
4469 __ srdi(r0, r0, Operand(32));
4470 __ xor_(result_reg, result_reg, r0);
4471 __ srwi(r0, scratch_high, Operand(31));
4472 __ add(result_reg, result_reg, r0);
4473
4474 __ bind(&done);
4475 __ Pop(scratch_high, scratch_low);
4476 // Account for saved regs.
4477 argument_offset -= 2 * kSystemPointerSize;
4478
4479 __ bind(&fastpath_done);
4480 __ StoreU64(result_reg, MemOperand(sp, argument_offset));
4481 __ Pop(result_reg, scratch);
4482
4483 __ Ret();
4484}
4485
4486void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm,
4487 CallApiCallbackMode mode) {
4488 // ----------- S t a t e -------------
4489 // CallApiCallbackMode::kOptimizedNoProfiling/kOptimized modes:
4490 // -- r4 : api function address
4491 // Both modes:
4492 // -- r5 : arguments count (not including the receiver)
4493 // -- r6 : FunctionTemplateInfo
4494 // -- cp : context
4495 // -- sp[0] : receiver
4496 // -- sp[8] : first argument
4497 // -- ...
4498 // -- sp[(argc) * 8] : last argument
4499 // -----------------------------------
4500
4501 Register function_callback_info_arg = kCArgRegs[0];
4502
4503 Register api_function_address = no_reg;
4504 Register argc = no_reg;
4505 Register func_templ = no_reg;
4506 Register topmost_script_having_context = no_reg;
4507 Register scratch = r7;
4508
4509 switch (mode) {
4510 case CallApiCallbackMode::kGeneric:
4511 argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4512 topmost_script_having_context = CallApiCallbackGenericDescriptor::
4513 TopmostScriptHavingContextRegister();
4514 func_templ =
4515 CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister();
4516 break;
4517
4518 case CallApiCallbackMode::kOptimizedNoProfiling:
4519 case CallApiCallbackMode::kOptimized:
4520 // Caller context is always equal to current context because we don't
4521 // inline Api calls cross-context.
4522 topmost_script_having_context = kContextRegister;
4523 api_function_address =
4524 CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4525 argc = CallApiCallbackOptimizedDescriptor::ActualArgumentsCountRegister();
4526 func_templ =
4527 CallApiCallbackOptimizedDescriptor::FunctionTemplateInfoRegister();
4528 break;
4529 }
4530 DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
4531 func_templ, scratch));
4532
4533 using FCA = FunctionCallbackArguments;
4534 using ER = ExternalReference;
4535 using FC = ApiCallbackExitFrameConstants;
4536
4537 static_assert(FCA::kArgsLength == 6);
4538 static_assert(FCA::kNewTargetIndex == 5);
4539 static_assert(FCA::kTargetIndex == 4);
4540 static_assert(FCA::kReturnValueIndex == 3);
4541 static_assert(FCA::kContextIndex == 2);
4542 static_assert(FCA::kIsolateIndex == 1);
4543 static_assert(FCA::kUnusedIndex == 0);
4544
4545 // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
4546 //
4547 // Target state:
4548 // sp[1 * kSystemPointerSize]: kUnused <= FCA::implicit_args_
4549 // sp[2 * kSystemPointerSize]: kIsolate
4550 // sp[3 * kSystemPointerSize]: kContext
4551 // sp[4 * kSystemPointerSize]: undefined (kReturnValue)
4552 // sp[5 * kSystemPointerSize]: kTarget
4553 // sp[6 * kSystemPointerSize]: undefined (kNewTarget)
4554 // Existing state:
4555 // sp[7 * kSystemPointerSize]: <= FCA:::values_
4556
4557 __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4558 topmost_script_having_context);
4559
4560 if (mode == CallApiCallbackMode::kGeneric) {
4561 api_function_address = ReassignRegister(topmost_script_having_context);
4562 }
4563
4564 // Reserve space on the stack.
4565 __ subi(sp, sp, Operand(FCA::kArgsLength * kSystemPointerSize));
4566
4567 // kIsolate.
4568 __ Move(scratch, ER::isolate_address());
4569 __ StoreU64(scratch, MemOperand(sp, FCA::kIsolateIndex * kSystemPointerSize));
4570
4571 // kContext
4572 __ StoreU64(cp, MemOperand(sp, FCA::kContextIndex * kSystemPointerSize));
4573
4574 // kReturnValue.
4575 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4576 __ StoreU64(scratch,
4577 MemOperand(sp, FCA::kReturnValueIndex * kSystemPointerSize));
4578
4579 // kTarget.
4580 __ StoreU64(func_templ,
4581 MemOperand(sp, FCA::kTargetIndex * kSystemPointerSize));
4582
4583 // kNewTarget.
4584 __ StoreU64(scratch,
4585 MemOperand(sp, FCA::kNewTargetIndex * kSystemPointerSize));
4586
4587 // kUnused.
4588 __ StoreU64(scratch, MemOperand(sp, FCA::kUnusedIndex * kSystemPointerSize));
4589
4590 FrameScope frame_scope(masm, StackFrame::MANUAL);
4591 if (mode == CallApiCallbackMode::kGeneric) {
4592 __ LoadExternalPointerField(
4593 api_function_address,
4594 FieldMemOperand(func_templ,
4595 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset),
4597 }
4598 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4599 StackFrame::API_CALLBACK_EXIT);
4600
4601 MemOperand argc_operand = MemOperand(fp, FC::kFCIArgcOffset);
4602 {
4603 ASM_CODE_COMMENT_STRING(masm, "Initialize v8::FunctionCallbackInfo");
4604 // FunctionCallbackInfo::length_.
4605 // TODO(ishell): pass JSParameterCount(argc) to simplify things on the
4606 // caller end.
4607 __ StoreU64(argc, argc_operand);
4608
4609 // FunctionCallbackInfo::implicit_args_.
4610 __ AddS64(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
4611 __ StoreU64(scratch, MemOperand(fp, FC::kFCIImplicitArgsOffset));
4612
4613 // FunctionCallbackInfo::values_ (points at JS arguments on the stack).
4614 __ AddS64(scratch, fp, Operand(FC::kFirstArgumentOffset));
4615 __ StoreU64(scratch, MemOperand(fp, FC::kFCIValuesOffset));
4616 }
4617
4618 __ RecordComment("v8::FunctionCallback's argument");
4619 __ AddS64(function_callback_info_arg, fp,
4620 Operand(FC::kFunctionCallbackInfoOffset));
4621
4622 DCHECK(!AreAliased(api_function_address, function_callback_info_arg));
4623
4624 ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4625 Register no_thunk_arg = no_reg;
4626
4627 MemOperand return_value_operand = MemOperand(fp, FC::kReturnValueOffset);
4628 static constexpr int kSlotsToDropOnReturn =
4629 FC::kFunctionCallbackInfoArgsLength + kJSArgcReceiverSlots;
4630
4631 const bool with_profiling =
4632 mode != CallApiCallbackMode::kOptimizedNoProfiling;
4633 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4634 thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4635 &argc_operand, return_value_operand);
4636}
4637
4638void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4639 // ----------- S t a t e -------------
4640 // -- cp : context
4641 // -- r4 : receiver
4642 // -- r6 : accessor info
4643 // -- r3 : holder
4644 // -----------------------------------
4645
4646 // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
4647 // name below the exit frame to make GC aware of them.
4648 using PCA = PropertyCallbackArguments;
4649 using ER = ExternalReference;
4650 using FC = ApiAccessorExitFrameConstants;
4651
4652 static_assert(PCA::kPropertyKeyIndex == 0);
4653 static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4654 static_assert(PCA::kHolderIndex == 2);
4655 static_assert(PCA::kIsolateIndex == 3);
4656 static_assert(PCA::kHolderV2Index == 4);
4657 static_assert(PCA::kReturnValueIndex == 5);
4658 static_assert(PCA::kDataIndex == 6);
4659 static_assert(PCA::kThisIndex == 7);
4660 static_assert(PCA::kArgsLength == 8);
4661
4662 // Set up v8::PropertyCallbackInfo's (PCI) args_ on the stack as follows:
4663 // Target state:
4664 // sp[0 * kSystemPointerSize]: name <= PCI::args_
4665 // sp[1 * kSystemPointerSize]: kShouldThrowOnErrorIndex
4666 // sp[2 * kSystemPointerSize]: kHolderIndex
4667 // sp[3 * kSystemPointerSize]: kIsolateIndex
4668 // sp[4 * kSystemPointerSize]: kHolderV2Index
4669 // sp[5 * kSystemPointerSize]: kReturnValueIndex
4670 // sp[6 * kSystemPointerSize]: kDataIndex
4671 // sp[7 * kSystemPointerSize]: kThisIndex / receiver
4672
4673 Register name_arg = kCArgRegs[0];
4674 Register property_callback_info_arg = kCArgRegs[1];
4675
4676 Register api_function_address = r5;
4677 Register receiver = ApiGetterDescriptor::ReceiverRegister();
4678 Register holder = ApiGetterDescriptor::HolderRegister();
4679 Register callback = ApiGetterDescriptor::CallbackRegister();
4680 Register scratch = r7;
4681 Register smi_zero = r8;
4682
4683 DCHECK(!AreAliased(receiver, holder, callback, scratch, smi_zero));
4684
4685 __ LoadTaggedField(scratch,
4686 FieldMemOperand(callback, AccessorInfo::kDataOffset), r0);
4687 __ Push(receiver, scratch); // kThisIndex, kDataIndex
4688 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4689 __ Move(smi_zero, Smi::zero());
4690 __ Push(scratch, smi_zero); // kReturnValueIndex, kHolderV2Index
4691 __ Move(scratch, ER::isolate_address());
4692 __ Push(scratch, holder); // kIsolateIndex, kHolderIndex
4693
4694 __ LoadTaggedField(name_arg,
4695 FieldMemOperand(callback, AccessorInfo::kNameOffset), r0);
4696 static_assert(kDontThrow == 0);
4697 __ Push(smi_zero, name_arg); // should_throw_on_error -> kDontThrow, name
4698
4699 __ RecordComment("Load api_function_address");
4700 __ LoadExternalPointerField(
4701 api_function_address,
4702 FieldMemOperand(callback, AccessorInfo::kMaybeRedirectedGetterOffset),
4703 kAccessorInfoGetterTag, no_reg, scratch);
4704
4705 FrameScope frame_scope(masm, StackFrame::MANUAL);
4706 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4707 StackFrame::API_ACCESSOR_EXIT);
4708
4709 __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
4710 // property_callback_info_arg = v8::PropertyCallbackInfo&
4711 __ AddS64(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));
4712
4713 DCHECK(!AreAliased(api_function_address, property_callback_info_arg, name_arg,
4714 callback, scratch));
4715
4716#ifdef V8_ENABLE_DIRECT_HANDLE
4717 // name_arg = Local<Name>(name), name value was pushed to GC-ed stack space.
4718 // |name_arg| is already initialized above.
4719#else
4720 // name_arg = Local<Name>(&name), which is &args_array[kPropertyKeyIndex].
4721 static_assert(PCA::kPropertyKeyIndex == 0);
4722 __ mr(name_arg, property_callback_info_arg);
4723#endif
4724
4725 ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
4726 // Pass AccessorInfo to thunk wrapper in case profiler or side-effect
4727 // checking is enabled.
4728 Register thunk_arg = callback;
4729
4730 MemOperand return_value_operand = MemOperand(fp, FC::kReturnValueOffset);
4731 static constexpr int kSlotsToDropOnReturn =
4732 FC::kPropertyCallbackInfoArgsLength;
4733 MemOperand* const kUseStackSpaceConstant = nullptr;
4734
4735 const bool with_profiling = true;
4736 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4737 thunk_ref, thunk_arg, kSlotsToDropOnReturn,
4738 kUseStackSpaceConstant, return_value_operand);
4739}
4740
4741void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
4742 UseScratchRegisterScope temps(masm);
4743 Register temp2 = temps.Acquire();
4744 // Place the return address on the stack, making the call
4745 // GC safe. The RegExp backend also relies on this.
4746 __ mflr(r0);
4747 __ StoreU64(r0,
4748 MemOperand(sp, kStackFrameExtraParamSlot * kSystemPointerSize));
4749
4750 if (ABI_USES_FUNCTION_DESCRIPTORS) {
4751 // AIX/PPC64BE Linux use a function descriptor;
4752 __ LoadU64(ToRegister(ABI_TOC_REGISTER),
4753 MemOperand(temp2, kSystemPointerSize));
4754 __ LoadU64(temp2, MemOperand(temp2, 0)); // Instruction address
4755 }
4756
4757 __ Call(temp2); // Call the C++ function.
4758
4759 __ LoadU64(r0, MemOperand(sp, kStackFrameExtraParamSlot * kSystemPointerSize));
4760 __ mtlr(r0);
4761 __ blr();
4762}
4763
4764namespace {
4765
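// Generate_DeoptimizationEntry spills every register into a FrameDescription,
// calls Deoptimizer::New() and Deoptimizer::ComputeOutputFrames() in C++,
// then unwinds the optimized frame, materializes the computed output frames on
// the stack, and finally jumps to the continuation of the last output frame.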
4766// This code tries to be close to ia32 code so that any changes can be
4767// easily ported.
4768void Generate_DeoptimizationEntry(MacroAssembler* masm,
4769 DeoptimizeKind deopt_kind) {
4770 Isolate* isolate = masm->isolate();
4771
4772 // Unlike on ARM we don't save all the registers, just the useful ones.
4773 // For the rest, there are gaps on the stack, so the offsets remain the same.
4775
4776 RegList restored_regs = kJSCallerSaved | kCalleeSaved;
4777 RegList saved_regs = restored_regs | sp;
4778
4779 const int kDoubleRegsSize = kDoubleSize * DoubleRegister::kNumRegisters;
4780
4781 // Save all double registers before messing with them.
4782 __ subi(sp, sp, Operand(kDoubleRegsSize));
4783 const RegisterConfiguration* config = RegisterConfiguration::Default();
4784 for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4785 int code = config->GetAllocatableDoubleCode(i);
4786 const DoubleRegister dreg = DoubleRegister::from_code(code);
4787 int offset = code * kDoubleSize;
4788 __ stfd(dreg, MemOperand(sp, offset));
4789 }
4790
4791 // Push saved_regs (needed to populate FrameDescription::registers_).
4792 // Leave gaps for other registers.
4793 __ subi(sp, sp, Operand(kNumberOfRegisters * kSystemPointerSize));
4794 for (int16_t i = kNumberOfRegisters - 1; i >= 0; i--) {
4795 if ((saved_regs.bits() & (1 << i)) != 0) {
4796 __ StoreU64(ToRegister(i), MemOperand(sp, kSystemPointerSize * i));
4797 }
4798 }
4799 {
4800 UseScratchRegisterScope temps(masm);
4801 Register scratch = temps.Acquire();
4802 __ Move(scratch, ExternalReference::Create(
4803 IsolateAddressId::kCEntryFPAddress, isolate));
4804 __ StoreU64(fp, MemOperand(scratch));
4805 }
4806 const int kSavedRegistersAreaSize =
4807 (kNumberOfRegisters * kSystemPointerSize) + kDoubleRegsSize;
4808
4809 // Get the address of the location in the code object (r6) (return
4810 // address for lazy deoptimization) and compute the fp-to-sp delta in
4811 // register r7.
4812 __ mflr(r5);
4813 __ addi(r6, sp, Operand(kSavedRegistersAreaSize));
4814 __ sub(r6, fp, r6);
4815
4816 // Allocate a new deoptimizer object.
4817 // Pass six arguments in r3 to r8.
4818 __ PrepareCallCFunction(5, r8);
4819 __ li(r3, Operand::Zero());
4820 Label context_check;
4821 __ LoadU64(r4,
4822 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
4823 __ JumpIfSmi(r4, &context_check);
4824 __ LoadU64(r3, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
4825 __ bind(&context_check);
4826 __ li(r4, Operand(static_cast<int>(deopt_kind)));
4827 // r5: code address or 0 already loaded.
4828 // r6: Fp-to-sp delta already loaded.
4830 // Call Deoptimizer::New().
4831 {
4832 AllowExternalCallThatCantCauseGC scope(masm);
4833 __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
4834 }
4835
4836 // Preserve "deoptimizer" object in register r3 and get the input
4837 // frame descriptor pointer to r4 (deoptimizer->input_);
4838 __ LoadU64(r4, MemOperand(r3, Deoptimizer::input_offset()));
4839
4840 // Copy core registers into FrameDescription::registers_[kNumRegisters].
4842 for (int i = 0; i < kNumberOfRegisters; i++) {
4843 int offset =
4844 (i * kSystemPointerSize) + FrameDescription::registers_offset();
4845 __ LoadU64(r5, MemOperand(sp, i * kSystemPointerSize));
4846 __ StoreU64(r5, MemOperand(r4, offset));
4847 }
4848
4849 int simd128_regs_offset = FrameDescription::simd128_registers_offset();
4850 // Copy double registers to
4851 // double_registers_[DoubleRegister::kNumRegisters]
4852 for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4853 int code = config->GetAllocatableDoubleCode(i);
4854 int dst_offset = code * kSimd128Size + simd128_regs_offset;
4855 int src_offset =
4856 code * kDoubleSize + kNumberOfRegisters * kSystemPointerSize;
4857 __ lfd(d0, MemOperand(sp, src_offset));
4858 __ stfd(d0, MemOperand(r4, dst_offset));
4859 }
4860
4861 // Mark the stack as not iterable for the CPU profiler which won't be able to
4862 // walk the stack without the return address.
4863 {
4864 UseScratchRegisterScope temps(masm);
4865 Register is_iterable = temps.Acquire();
4866 Register zero = r7;
4867 __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
4868 __ li(zero, Operand(0));
4869 __ stb(zero, MemOperand(is_iterable));
4870 }
4871
4872 // Remove the saved registers from the stack.
4873 __ addi(sp, sp, Operand(kSavedRegistersAreaSize));
4874
4875 // Compute a pointer to the unwinding limit in register r5; that is
4876 // the first stack slot not part of the input frame.
4877 __ LoadU64(r5, MemOperand(r4, FrameDescription::frame_size_offset()));
4878 __ add(r5, r5, sp);
4879
4880 // Unwind the stack down to - but not including - the unwinding
4881 // limit and copy the contents of the activation frame to the input
4882 // frame description.
4883 __ addi(r6, r4, Operand(FrameDescription::frame_content_offset()));
4884 Label pop_loop;
4885 Label pop_loop_header;
4886 __ b(&pop_loop_header);
4887 __ bind(&pop_loop);
4888 __ pop(r7);
4889 __ StoreU64(r7, MemOperand(r6, 0));
4890 __ addi(r6, r6, Operand(kSystemPointerSize));
4891 __ bind(&pop_loop_header);
4892 __ CmpS64(r5, sp);
4893 __ bne(&pop_loop);
4894
4895 // Compute the output frame in the deoptimizer.
4896 __ push(r3); // Preserve deoptimizer object across call.
4897 // r3: deoptimizer object; r4: scratch.
4898 __ PrepareCallCFunction(1, r4);
4899 // Call Deoptimizer::ComputeOutputFrames().
4900 {
4901 AllowExternalCallThatCantCauseGC scope(masm);
4902 __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
4903 }
4904 __ pop(r3); // Restore deoptimizer object (class Deoptimizer).
4905
4906 __ LoadU64(sp, MemOperand(r3, Deoptimizer::caller_frame_top_offset()));
4907
4908 // Replace the current (input) frame with the output frames.
4909 Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
4910 // Outer loop state: r7 = current "FrameDescription** output_",
4911 // r4 = one past the last FrameDescription**.
4912 __ LoadU32(r4, MemOperand(r3, Deoptimizer::output_count_offset()));
4913 __ LoadU64(r7,
4914 MemOperand(r3, Deoptimizer::output_offset())); // r7 is output_.
4915 __ ShiftLeftU64(r4, r4, Operand(kSystemPointerSizeLog2));
4916 __ add(r4, r7, r4);
4917 __ b(&outer_loop_header);
4918
4919 __ bind(&outer_push_loop);
4920 // Inner loop state: r5 = current FrameDescription*, r6 = loop index.
4921 __ LoadU64(r5, MemOperand(r7, 0)); // output_[ix]
4922 __ LoadU64(r6, MemOperand(r5, FrameDescription::frame_size_offset()));
4923 __ b(&inner_loop_header);
4924
4925 __ bind(&inner_push_loop);
4926 __ addi(r6, r6, Operand(-sizeof(intptr_t)));
4927 __ add(r9, r5, r6);
4928 __ LoadU64(r9, MemOperand(r9, FrameDescription::frame_content_offset()));
4929 __ push(r9);
4930
4931 __ bind(&inner_loop_header);
4932 __ cmpi(r6, Operand::Zero());
4933 __ bne(&inner_push_loop); // test for gt?
4934
4935 __ addi(r7, r7, Operand(kSystemPointerSize));
4936 __ bind(&outer_loop_header);
4937 __ CmpS64(r7, r4);
4938 __ blt(&outer_push_loop);
4939
4940 __ LoadU64(r4, MemOperand(r3, Deoptimizer::input_offset()));
4941 for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
4942 int code = config->GetAllocatableDoubleCode(i);
4943 const DoubleRegister dreg = DoubleRegister::from_code(code);
4944 int src_offset = code * kSimd128Size + simd128_regs_offset;
4945 __ lfd(dreg, MemOperand(r4, src_offset));
4946 }
4947
4948 // Push pc, and continuation from the last output frame.
4949 __ LoadU64(r9, MemOperand(r5, FrameDescription::pc_offset()));
4950 __ push(r9);
4951 __ LoadU64(r9, MemOperand(r5, FrameDescription::continuation_offset()));
4952 __ push(r9);
4953
4954 // Restore the registers from the last output frame.
4955 {
4956 UseScratchRegisterScope temps(masm);
4957 Register scratch = temps.Acquire();
4958 DCHECK(!(restored_regs.has(scratch)));
4959 __ mr(scratch, r5);
4960 for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
4961 int offset =
4962 (i * kSystemPointerSize) + FrameDescription::registers_offset();
4963 if ((restored_regs.bits() & (1 << i)) != 0) {
4964 __ LoadU64(ToRegister(i), MemOperand(scratch, offset));
4965 }
4966 }
4967 }
4968
4969 {
4970 UseScratchRegisterScope temps(masm);
4971 Register is_iterable = temps.Acquire();
4972 Register one = r7;
4973 __ push(one); // Save the value from the output FrameDescription.
4974 __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
4975 __ li(one, Operand(1));
4976 __ stb(one, MemOperand(is_iterable));
4977 __ pop(one); // Restore the value from the output FrameDescription.
4978 }
4979
4980 {
4981 UseScratchRegisterScope temps(masm);
4982 Register scratch = temps.Acquire();
4983 __ pop(scratch); // get continuation, leave pc on stack
4984 __ pop(r0);
4985 __ mtlr(r0);
4986 Label end;
4987 __ CmpU64(scratch, Operand::Zero(), r0);
4988 __ beq(&end);
4989 __ Jump(scratch);
4990 __ bind(&end);
4991 __ Ret();
4992 }
4993
4994 __ stop();
4995}
4996
4997} // namespace
4998
4999void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
5000 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
5001}
5002
5003void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
5004 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
5005}
5006
5007void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
5008 // Frame is being dropped:
5009 // - Look up current function on the frame.
5010 // - Leave the frame.
5011 // - Restart the frame by calling the function.
5012
5013 __ LoadU64(r4, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
5014 __ LoadU64(r3, MemOperand(fp, StandardFrameConstants::kArgCOffset));
5015 __ LeaveFrame(StackFrame::INTERPRETED);
5016
5017 // The arguments are already in the stack (including any necessary padding),
5018 // we should not try to massage the arguments again.
5019 __ mov(r5, Operand(kDontAdaptArgumentsSentinel));
5020 __ InvokeFunction(r4, r5, r3, InvokeType::kJump);
5021}
5022
5023#undef __
5024} // namespace internal
5025} // namespace v8
5026
5027#endif // V8_TARGET_ARCH_PPC64