v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
builtins-arm.cc
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_ARM
6
12// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
15#include "src/debug/debug.h"
19#include "src/heap/heap-inl.h"
21#include "src/objects/cell.h"
22#include "src/objects/foreign.h"
26#include "src/objects/smi.h"
27#include "src/runtime/runtime.h"
28
29#if V8_ENABLE_WEBASSEMBLY
34#endif // V8_ENABLE_WEBASSEMBLY
35
36namespace v8 {
37namespace internal {
38
39#define __ ACCESS_MASM(masm)
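// Note: ACCESS_MASM expands to `masm->`, so every `__ foo(...)` line below
// emits code through the MacroAssembler passed into the generator.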
40
41void Builtins::Generate_Adaptor(MacroAssembler* masm,
42 int formal_parameter_count, Address address) {
43#if defined(__thumb__)
44 // Thumb mode builtin.
45 DCHECK_EQ(1, reinterpret_cast<uintptr_t>(
46 ExternalReference::Create(address).address()) &
47 1);
48#endif
50 __ TailCallBuiltin(
51 Builtins::AdaptorWithBuiltinExitFrame(formal_parameter_count));
52}
53
54namespace {
55
56enum class ArgumentsElementType {
57 kRaw, // Push arguments as they are.
58 kHandle // Dereference arguments before pushing.
59};
60
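// Helper that pushes the |argc| - receiver arguments held in |array| onto the
// stack, walking from the highest index down so that the first argument ends
// up nearest the stack pointer. In kHandle mode each element is a handle and
// is dereferenced before being pushed.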
61void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
62 Register scratch,
63 ArgumentsElementType element_type) {
64 DCHECK(!AreAliased(array, argc, scratch));
65 UseScratchRegisterScope temps(masm);
66 Register counter = scratch;
67 Register value = temps.Acquire();
68 Label loop, entry;
69 __ sub(counter, argc, Operand(kJSArgcReceiverSlots));
70 __ b(&entry);
71 __ bind(&loop);
72 __ ldr(value, MemOperand(array, counter, LSL, kSystemPointerSizeLog2));
73 if (element_type == ArgumentsElementType::kHandle) {
74 __ ldr(value, MemOperand(value));
75 }
76 __ push(value);
77 __ bind(&entry);
78 __ sub(counter, counter, Operand(1), SetCC);
79 __ b(ge, &loop);
80}
81
82void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
83 // ----------- S t a t e -------------
84 // -- r0 : number of arguments
85 // -- r1 : constructor function
86 // -- r3 : new target
87 // -- cp : context
88 // -- lr : return address
89 // -- sp[...]: constructor arguments
90 // -----------------------------------
91
92 Register scratch = r2;
93
94 Label stack_overflow;
95
96 __ StackOverflowCheck(r0, scratch, &stack_overflow);
97
98 // Enter a construct frame.
99 {
100 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
101
102 // Preserve the incoming parameters on the stack.
103 __ Push(cp, r0);
104
105 // TODO(victorgomes): When the arguments adaptor is completely removed, we
106 // should get the formal parameter count and copy the arguments in its
107 // correct position (including any undefined), instead of delaying this to
108 // InvokeFunction.
109
110 // Set up pointer to first argument (skip receiver).
111 __ add(
112 r4, fp,
113 Operand(StandardFrameConstants::kCallerSPOffset + kSystemPointerSize));
114 // Copy arguments and receiver to the expression stack.
115 // r4: Pointer to start of arguments.
116 // r0: Number of arguments.
117 Generate_PushArguments(masm, r4, r0, r5, ArgumentsElementType::kRaw);
118 // The receiver for the builtin/api call.
119 __ PushRoot(RootIndex::kTheHoleValue);
120
121 // Call the function.
122 // r0: number of arguments (untagged)
123 // r1: constructor function
124 // r3: new target
125 __ InvokeFunctionWithNewTarget(r1, r3, r0, InvokeType::kCall);
126
127 // Restore context from the frame.
128 __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
129 // Restore arguments count from the frame.
130 __ ldr(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
131 // Leave construct frame.
132 }
133
134 // Remove caller arguments from the stack and return.
135 __ DropArguments(scratch);
136 __ Jump(lr);
137
138 __ bind(&stack_overflow);
139 {
140 FrameScope scope(masm, StackFrame::INTERNAL);
141 __ CallRuntime(Runtime::kThrowStackOverflow);
142 __ bkpt(0); // Unreachable code.
143 }
144}
145
146} // namespace
147
148// The construct stub for ES5 constructor functions and ES6 class constructors.
149void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
150 // ----------- S t a t e -------------
151 // -- r0: number of arguments (untagged)
152 // -- r1: constructor function
153 // -- r3: new target
154 // -- cp: context
155 // -- lr: return address
156 // -- sp[...]: constructor arguments
157 // -----------------------------------
158
159 FrameScope scope(masm, StackFrame::MANUAL);
160 // Enter a construct frame.
161 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
162 __ EnterFrame(StackFrame::CONSTRUCT);
163
164 // Preserve the incoming parameters on the stack.
165 __ LoadRoot(r4, RootIndex::kTheHoleValue);
166 __ Push(cp, r0, r1, r4, r3);
167
168 // ----------- S t a t e -------------
169 // -- sp[0*kPointerSize]: new target
170 // -- sp[1*kPointerSize]: padding
171 // -- r1 and sp[2*kPointerSize]: constructor function
172 // -- sp[3*kPointerSize]: number of arguments
173 // -- sp[4*kPointerSize]: context
174 // -----------------------------------
175
176 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
177 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
178 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r4);
179 __ JumpIfIsInRange(
180 r4, r4, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
181 static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
182 &not_create_implicit_receiver);
183
184 // If not derived class constructor: Allocate the new receiver object.
185 __ CallBuiltin(Builtin::kFastNewObject);
186 __ b(&post_instantiation_deopt_entry);
187
188 // Else: use TheHoleValue as receiver for constructor call
189 __ bind(&not_create_implicit_receiver);
190 __ LoadRoot(r0, RootIndex::kTheHoleValue);
191
192 // ----------- S t a t e -------------
193 // -- r0: receiver
194 // -- Slot 3 / sp[0*kPointerSize]: new target
195 // -- Slot 2 / sp[1*kPointerSize]: constructor function
196 // -- Slot 1 / sp[2*kPointerSize]: number of arguments
197 // -- Slot 0 / sp[3*kPointerSize]: context
198 // -----------------------------------
199 // Deoptimizer enters here.
200 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
201 masm->pc_offset());
202 __ bind(&post_instantiation_deopt_entry);
203
204 // Restore new target.
205 __ Pop(r3);
206
207 // Push the allocated receiver to the stack.
208 __ Push(r0);
209 // We need two copies because we may have to return the original one
210 // and the calling conventions dictate that the called function pops the
211 // receiver. The second copy is pushed after the arguments; we keep it in
212 // r6 since r0 needs to hold the number of arguments before invoking the
213 // function.
214 __ mov(r6, r0);
215
216 // Set up pointer to first argument (skip receiver).
217 __ add(r4, fp,
218 Operand(StandardFrameConstants::kCallerSPOffset + kSystemPointerSize));
219
220 // Restore constructor function and argument count.
221 __ ldr(r1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
222 __ ldr(r0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
223
224 Label stack_overflow;
225 __ StackOverflowCheck(r0, r5, &stack_overflow);
226
227 // TODO(victorgomes): When the arguments adaptor is completely removed, we
228 // should get the formal parameter count and copy the arguments in its
229 // correct position (including any undefined), instead of delaying this to
230 // InvokeFunction.
231
232 // Copy arguments to the expression stack.
233 // r4: Pointer to start of argument.
234 // r0: Number of arguments.
235 Generate_PushArguments(masm, r4, r0, r5, ArgumentsElementType::kRaw);
236
237 // Push implicit receiver.
238 __ Push(r6);
239
240 // Call the function.
241 __ InvokeFunctionWithNewTarget(r1, r3, r0, InvokeType::kCall);
242
243 // If the result is an object (in the ECMA sense), we should get rid
244 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
245 // on page 74.
246 Label use_receiver, do_throw, leave_and_return, check_receiver;
247
248 // If the result is undefined, we jump out to using the implicit receiver.
249 __ JumpIfNotRoot(r0, RootIndex::kUndefinedValue, &check_receiver);
250
251 // Otherwise we do a smi check and fall through to check if the return value
252 // is a valid receiver.
253
254 // Throw away the result of the constructor invocation and use the
255 // on-stack receiver as the result.
256 __ bind(&use_receiver);
257 __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
258 __ JumpIfRoot(r0, RootIndex::kTheHoleValue, &do_throw);
259
260 __ bind(&leave_and_return);
261 // Restore arguments count from the frame.
262 __ ldr(r1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
263 // Leave construct frame.
264 __ LeaveFrame(StackFrame::CONSTRUCT);
265
266 // Remove caller arguments from the stack and return.
267 __ DropArguments(r1);
268 __ Jump(lr);
269
270 __ bind(&check_receiver);
271 // If the result is a smi, it is *not* an object in the ECMA sense.
272 __ JumpIfSmi(r0, &use_receiver);
273
274 // If the type of the result (stored in its map) is less than
275 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
276 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
277 __ CompareObjectType(r0, r4, r5, FIRST_JS_RECEIVER_TYPE);
278 __ b(ge, &leave_and_return);
279 __ b(&use_receiver);
280
281 __ bind(&do_throw);
282 // Restore the context from the frame.
283 __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
284 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
285 __ bkpt(0);
286
287 __ bind(&stack_overflow);
288 // Restore the context from the frame.
289 __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
290 __ CallRuntime(Runtime::kThrowStackOverflow);
291 // Unreachable code.
292 __ bkpt(0);
293}
294
295void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
296 Generate_JSBuiltinsConstructStubHelper(masm);
297}
298
299static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
300 Register scratch) {
301 DCHECK(!AreAliased(code, scratch));
302 // Verify that the code kind is baseline code via the CodeKind.
303 __ ldr(scratch, FieldMemOperand(code, Code::kFlagsOffset));
304 __ DecodeField<Code::KindField>(scratch);
305 __ cmp(scratch, Operand(static_cast<int>(CodeKind::BASELINE)));
306 __ Assert(eq, AbortReason::kExpectedBaselineData);
307}
308
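// Loads the function's dispatchable data from the SharedFunctionInfo: on
// fall-through |bytecode| holds a BytecodeArray; jumps to |is_baseline| if the
// data is baseline Code, or to |is_unavailable| if no bytecode is present.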
309static void GetSharedFunctionInfoBytecodeOrBaseline(
310 MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
311 Label* is_baseline, Label* is_unavailable) {
312 ASM_CODE_COMMENT(masm);
313 Label done;
314
315 Register data = bytecode;
316 __ ldr(data,
317 FieldMemOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset));
318
319 __ LoadMap(scratch1, data);
320 __ ldrh(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
321
322#ifndef V8_JITLESS
323 __ cmp(scratch1, Operand(CODE_TYPE));
324 if (v8_flags.debug_code) {
325 Label not_baseline;
326 __ b(ne, &not_baseline);
327 AssertCodeIsBaseline(masm, data, scratch1);
328 __ b(eq, is_baseline);
329 __ bind(&not_baseline);
330 } else {
331 __ b(eq, is_baseline);
332 }
333#endif // !V8_JITLESS
334
335 __ cmp(scratch1, Operand(BYTECODE_ARRAY_TYPE));
336 __ b(eq, &done);
337
338 __ cmp(scratch1, Operand(INTERPRETER_DATA_TYPE));
339 __ b(ne, is_unavailable);
340 __ ldr(data, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset));
341
342 __ bind(&done);
343}
344
345// static
346void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
347 // ----------- S t a t e -------------
348 // -- r0 : the value to pass to the generator
349 // -- r1 : the JSGeneratorObject to resume
350 // -- lr : return address
351 // -----------------------------------
352 // Store input value into generator object.
353 __ str(r0, FieldMemOperand(r1, JSGeneratorObject::kInputOrDebugPosOffset));
354 __ RecordWriteField(r1, JSGeneratorObject::kInputOrDebugPosOffset, r0,
355 kLRHasNotBeenSaved, SaveFPRegsMode::kIgnore);
356 // Check that r1 is still valid, RecordWrite might have clobbered it.
357 __ AssertGeneratorObject(r1);
358
359 // Load suspended function and context.
360 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
361 __ ldr(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
362
363 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
364 Label stepping_prepared;
365 Register scratch = r5;
366
367 // Flood function if we are stepping.
368 ExternalReference debug_hook =
369 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
370 __ Move(scratch, debug_hook);
371 __ ldrsb(scratch, MemOperand(scratch));
372 __ cmp(scratch, Operand(0));
373 __ b(ne, &prepare_step_in_if_stepping);
374
375 // Flood function if we need to continue stepping in the suspended
376 // generator.
377 ExternalReference debug_suspended_generator =
378 ExternalReference::debug_suspended_generator_address(masm->isolate());
379 __ Move(scratch, debug_suspended_generator);
380 __ ldr(scratch, MemOperand(scratch));
381 __ cmp(scratch, Operand(r1));
382 __ b(eq, &prepare_step_in_suspended_generator);
383 __ bind(&stepping_prepared);
384
385 // Check the stack for overflow. We are not trying to catch interruptions
386 // (i.e. debug break and preemption) here, so check the "real stack limit".
387 Label stack_overflow;
388 __ LoadStackLimit(scratch, StackLimitKind::kRealStackLimit);
389 __ cmp(sp, scratch);
390 __ b(lo, &stack_overflow);
391
392 // ----------- S t a t e -------------
393 // -- r1 : the JSGeneratorObject to resume
394 // -- r4 : generator function
395 // -- cp : generator context
396 // -- lr : return address
397 // -- sp[0] : generator receiver
398 // -----------------------------------
399
400 // Copy the function arguments from the generator object's register file.
401 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
402 __ ldrh(r3,
403 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
404 __ sub(r3, r3, Operand(kJSArgcReceiverSlots));
405 __ ldr(r2,
406 FieldMemOperand(r1, JSGeneratorObject::kParametersAndRegistersOffset));
407 {
408 Label done_loop, loop;
409 __ bind(&loop);
410 __ sub(r3, r3, Operand(1), SetCC);
411 __ b(lt, &done_loop);
412 __ add(scratch, r2, Operand(r3, LSL, kTaggedSizeLog2));
413 __ ldr(scratch, FieldMemOperand(scratch, OFFSET_OF_DATA_START(FixedArray)));
414 __ Push(scratch);
415 __ b(&loop);
416 __ bind(&done_loop);
417
418 // Push receiver.
419 __ ldr(scratch, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
420 __ Push(scratch);
421 }
422
423 // Underlying function needs to have bytecode available.
424 if (v8_flags.debug_code) {
425 Label is_baseline, is_unavailable, ok;
426 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
427 GetSharedFunctionInfoBytecodeOrBaseline(masm, r3, r3, r0, &is_baseline,
428 &is_unavailable);
429 __ jmp(&ok);
430
431 __ bind(&is_unavailable);
432 __ Abort(AbortReason::kMissingBytecodeArray);
433
434 __ bind(&is_baseline);
435 __ CompareObjectType(r3, r3, r3, CODE_TYPE);
436 __ Assert(eq, AbortReason::kMissingBytecodeArray);
437
438 __ bind(&ok);
439 }
440
441 // Resume (Ignition/TurboFan) generator object.
442 {
443 __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
444 __ ldrh(r0, FieldMemOperand(
445 r0, SharedFunctionInfo::kFormalParameterCountOffset));
446 // We abuse new.target both to indicate that this is a resume call and to
447 // pass in the generator object. In ordinary calls, new.target is always
448 // undefined because generator functions are non-constructable.
449 __ Move(r3, r1);
450 __ Move(r1, r4);
451 __ JumpJSFunction(r1);
452 }
453
454 __ bind(&prepare_step_in_if_stepping);
455 {
456 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
457 __ Push(r1, r4);
458 // Push hole as receiver since we do not use it for stepping.
459 __ PushRoot(RootIndex::kTheHoleValue);
460 __ CallRuntime(Runtime::kDebugOnFunctionCall);
461 __ Pop(r1);
462 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
463 }
464 __ b(&stepping_prepared);
465
466 __ bind(&prepare_step_in_suspended_generator);
467 {
468 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
469 __ Push(r1);
470 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
471 __ Pop(r1);
472 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
473 }
474 __ b(&stepping_prepared);
475
476 __ bind(&stack_overflow);
477 {
478 FrameScope scope(masm, StackFrame::INTERNAL);
479 __ CallRuntime(Runtime::kThrowStackOverflow);
480 __ bkpt(0); // This should be unreachable.
481 }
482}
483
484void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
485 FrameScope scope(masm, StackFrame::INTERNAL);
486 __ push(r1);
487 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
488}
489
490namespace {
491
492// Total size of the stack space pushed by JSEntryVariant.
493// JSEntryTrampoline uses this to access on stack arguments passed to
494// JSEntryVariant.
495constexpr int kPushedStackSpace =
498 7 * kPointerSize /* r5, r6, r7, r8, r9, fp, lr */ +
500
501// Assert that the EntryFrameConstants are in sync with the builtin.
502static_assert(kPushedStackSpace ==
504 5 * kPointerSize /* r5, r6, r7, r8, r9*/ +
506 "Pushed stack space and frame constants do not match. See "
507 "frame-constants-arm.h");
508
509// Called with the native C calling convention. The corresponding function
510// signature is either:
511//
512// using JSEntryFunction = GeneratedCode<Address(
513// Address root_register_value, Address new_target, Address target,
514// Address receiver, intptr_t argc, Address** argv)>;
515// or
516// using JSEntryFunction = GeneratedCode<Address(
517// Address root_register_value, MicrotaskQueue* microtask_queue)>;
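//
// Which signature applies depends on |entry_trampoline|: kJSEntryTrampoline
// and kJSConstructEntryTrampoline take the six-argument form, while
// kRunMicrotasksTrampoline takes only the microtask queue. A minimal sketch
// (not from this file, based on how execution.cc invokes the entry stub;
// accessor names are assumptions) of the C++ side:
//
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, Address new_target, Address target,
//       Address receiver, intptr_t argc, Address** argv)>;
//   JSEntryFunction entry =
//       JSEntryFunction::FromAddress(isolate, code->instruction_start());
//   Address result = entry.Call(isolate->isolate_root(), new_target, target,
//                               receiver, argc, argv);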
518void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
519 Builtin entry_trampoline) {
520 // The register state is either:
521 // r0: root_register_value
522 // r1: code entry
523 // r2: function
524 // r3: receiver
525 // [sp + 0 * kSystemPointerSize]: argc
526 // [sp + 1 * kSystemPointerSize]: argv
527 // or
528 // r0: root_register_value
529 // r1: microtask_queue
530 // Preserve all but r0 and pass them to entry_trampoline.
531 Label invoke, handler_entry, exit;
532 const RegList kCalleeSavedWithoutFp = kCalleeSaved - fp;
533
534 // Update |pushed_stack_space| when we manipulate the stack.
536 {
537 NoRootArrayScope no_root_array(masm);
538
539 // Called from C, so do not pop argc and args on exit (preserve sp)
540 // No need to save register-passed args
541 // Save callee-saved registers (incl. cp), but without fp
542 __ stm(db_w, sp, kCalleeSavedWithoutFp);
543 pushed_stack_space +=
545
546 // Save callee-saved vfp registers.
548 pushed_stack_space += kNumDoubleCalleeSaved * kDoubleSize;
549
550 // Set up the reserved register for 0.0.
551 __ vmov(kDoubleRegZero, base::Double(0.0));
552
553 // Initialize the root register.
554 // C calling convention. The first argument is passed in r0.
555 __ mov(kRootRegister, r0);
556 }
557
558 // r0: root_register_value
559
560 // Push a frame with special values setup to mark it as an entry frame.
561 // Clear c_entry_fp, now we've pushed its previous value to the stack.
562 // If the c_entry_fp is not already zero and we don't clear it, the
563 // StackFrameIteratorForProfiler will assume we are executing C++ and miss the
564 // JS frames on top.
565 __ mov(r9, Operand::Zero());
566 __ Move(r4, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
567 masm->isolate()));
568 __ ldr(r7, MemOperand(r4));
569 __ str(r9, MemOperand(r4));
570
571 __ LoadIsolateField(r4, IsolateFieldId::kFastCCallCallerFP);
572 __ ldr(r6, MemOperand(r4));
573 __ str(r9, MemOperand(r4));
574
575 __ LoadIsolateField(r4, IsolateFieldId::kFastCCallCallerPC);
576 __ ldr(r5, MemOperand(r4));
577 __ str(r9, MemOperand(r4));
578
579 __ mov(r9, Operand(StackFrame::TypeToMarker(type)));
580 __ mov(r8, Operand(StackFrame::TypeToMarker(type)));
581 __ stm(db_w, sp, {r5, r6, r7, r8, r9, fp, lr});
582 pushed_stack_space += 7 * kPointerSize /* r5, r6, r7, r8, r9, fp, lr */;
583
584 Register scratch = r6;
585
586 // Set up frame pointer for the frame to be pushed.
588
589 // If this is the outermost JS call, set js_entry_sp value.
590 Label non_outermost_js;
591 ExternalReference js_entry_sp = ExternalReference::Create(
592 IsolateAddressId::kJSEntrySPAddress, masm->isolate());
593 __ Move(r5, js_entry_sp);
594 __ ldr(scratch, MemOperand(r5));
595 __ cmp(scratch, Operand::Zero());
596 __ b(ne, &non_outermost_js);
597 __ str(fp, MemOperand(r5));
598 __ mov(scratch, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
599 Label cont;
600 __ b(&cont);
601 __ bind(&non_outermost_js);
602 __ mov(scratch, Operand(StackFrame::INNER_JSENTRY_FRAME));
603 __ bind(&cont);
604 __ push(scratch);
605
606 // Jump to a faked try block that does the invoke, with a faked catch
607 // block that sets the exception.
608 __ jmp(&invoke);
609
610 // Block literal pool emission whilst taking the position of the handler
611 // entry. This avoids making the assumption that literal pools are always
612 // emitted after an instruction is emitted, rather than before.
613 {
614 Assembler::BlockConstPoolScope block_const_pool(masm);
615 __ bind(&handler_entry);
616
617 // Store the current pc as the handler offset. It's used later to create the
618 // handler table.
619 masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
620
621 // Caught exception: Store result (exception) in the exception
622 // field in the JSEnv and return a failure sentinel. Coming in here the
623 // fp will be invalid because the PushStackHandler below sets it to 0 to
624 // signal the existence of the JSEntry frame.
625 __ Move(scratch, ExternalReference::Create(
626 IsolateAddressId::kExceptionAddress, masm->isolate()));
627 }
628 __ str(r0, MemOperand(scratch));
629 __ LoadRoot(r0, RootIndex::kException);
630 __ b(&exit);
631
632 // Invoke: Link this frame into the handler chain.
633 __ bind(&invoke);
634 // Must preserve r0-r4, r5-r6 are available.
635 __ PushStackHandler();
636 // If an exception not caught by another handler occurs, this handler
637 // returns control to the code after the bl(&invoke) above, which
638 // restores all kCalleeSaved registers (including cp and fp) to their
639 // saved values before returning a failure to C.
640 //
641 // Invoke the function by calling through JS entry trampoline builtin and
642 // pop the faked function when we return.
643 DCHECK_EQ(kPushedStackSpace, pushed_stack_space);
644 USE(pushed_stack_space);
645 __ CallBuiltin(entry_trampoline);
646
647 // Unlink this frame from the handler chain.
648 __ PopStackHandler();
649
650 __ bind(&exit); // r0 holds result
651 // Check if the current stack frame is marked as the outermost JS frame.
652 Label non_outermost_js_2;
653 __ pop(r5);
654 __ cmp(r5, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
655 __ b(ne, &non_outermost_js_2);
656 __ mov(r6, Operand::Zero());
657 __ Move(r5, js_entry_sp);
658 __ str(r6, MemOperand(r5));
659 __ bind(&non_outermost_js_2);
660
661 // Restore the top frame descriptors from the stack.
662 __ ldm(ia_w, sp, {r3, r4, r5});
663 __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerFP);
664 __ str(r4, MemOperand(scratch));
665
666 __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerPC);
667 __ str(r3, MemOperand(scratch));
668
669 __ Move(scratch, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
670 masm->isolate()));
671 __ str(r5, MemOperand(scratch));
672
673 // Reset the stack to the callee saved registers.
674 __ add(sp, sp,
676 kSystemPointerSize /* already popped the exit frame FP */));
677
678 __ ldm(ia_w, sp, {fp, lr});
679
680 // Restore callee-saved vfp registers.
682
683 __ ldm(ia_w, sp, kCalleeSavedWithoutFp);
684
685 __ mov(pc, lr);
686
687 // Emit constant pool.
688 __ CheckConstPool(true, false);
689}
690
691} // namespace
692
693void Builtins::Generate_JSEntry(MacroAssembler* masm) {
694 Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
695}
696
697void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
698 Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
699 Builtin::kJSConstructEntryTrampoline);
700}
701
702void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
703 Generate_JSEntryVariant(masm, StackFrame::ENTRY,
704 Builtin::kRunMicrotasksTrampoline);
705}
706
707static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
708 bool is_construct) {
709 // Called from Generate_JS_Entry
710 // r0: root_register_value
711 // r1: new.target
712 // r2: function
713 // r3: receiver
714 // [fp + kPushedStackSpace + 0 * kSystemPointerSize]: argc
715 // [fp + kPushedStackSpace + 1 * kSystemPointerSize]: argv
716 // r5-r6, r8 and cp may be clobbered
717
718 __ ldr(r0,
719 MemOperand(fp, kPushedStackSpace + EntryFrameConstants::kArgcOffset));
720 __ ldr(r4,
721 MemOperand(fp, kPushedStackSpace + EntryFrameConstants::kArgvOffset));
722
723 // r1: new.target
724 // r2: function
725 // r3: receiver
726 // r0: argc
727 // r4: argv
728
729 // Enter an internal frame.
730 {
731 FrameScope scope(masm, StackFrame::INTERNAL);
732
733 // Setup the context (we need to use the caller context from the isolate).
734 ExternalReference context_address = ExternalReference::Create(
735 IsolateAddressId::kContextAddress, masm->isolate());
736 __ Move(cp, context_address);
737 __ ldr(cp, MemOperand(cp));
738
739 // Push the function.
740 __ Push(r2);
741
742 // Check if we have enough stack space to push all arguments + receiver.
743 // Clobbers r5.
744 Label enough_stack_space, stack_overflow;
745 __ mov(r6, r0);
746 __ StackOverflowCheck(r6, r5, &stack_overflow);
747 __ b(&enough_stack_space);
748 __ bind(&stack_overflow);
749 __ CallRuntime(Runtime::kThrowStackOverflow);
750 // Unreachable code.
751 __ bkpt(0);
752
753 __ bind(&enough_stack_space);
754
755 // Copy arguments to the stack.
756 // r1: new.target
757 // r2: function
758 // r3: receiver
759 // r0: argc
760 // r4: argv, i.e. points to first arg
761 Generate_PushArguments(masm, r4, r0, r5, ArgumentsElementType::kHandle);
762
763 // Push the receiver.
764 __ Push(r3);
765
766 // Setup new.target and function.
767 __ mov(r3, r1);
768 __ mov(r1, r2);
769 // r0: argc
770 // r1: function
771 // r3: new.target
772
773 // Initialize all JavaScript callee-saved registers, since they will be seen
774 // by the garbage collector as part of handlers.
775 __ LoadRoot(r4, RootIndex::kUndefinedValue);
776 __ mov(r2, r4);
777 __ mov(r5, r4);
778 __ mov(r6, r4);
779 __ mov(r8, r4);
780 __ mov(r9, r4);
781
782 // Invoke the code.
783 Builtin builtin = is_construct ? Builtin::kConstruct : Builtins::Call();
784 __ CallBuiltin(builtin);
785
786 // Exit the JS frame and remove the parameters (except function), and
787 // return.
788 // Respect ABI stack constraint.
789 }
790 __ Jump(lr);
791
792 // r0: result
793}
794
795void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
796 Generate_JSEntryTrampolineHelper(masm, false);
797}
798
799void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
800 Generate_JSEntryTrampolineHelper(masm, true);
801}
802
803void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
804 // This expects two C++ function parameters passed by Invoke() in
805 // execution.cc.
806 // r0: root_register_value
807 // r1: microtask_queue
808
809 __ mov(RunMicrotasksDescriptor::MicrotaskQueueRegister(), r1);
810 __ TailCallBuiltin(Builtin::kRunMicrotasks);
811}
812
813static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
814 Register scratch2) {
815 ASM_CODE_COMMENT(masm);
816 Register params_size = scratch1;
817 // Get the size of the formal parameters + receiver (in bytes).
818 __ ldr(params_size,
819 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
820 __ ldrh(params_size,
821 FieldMemOperand(params_size, BytecodeArray::kParameterSizeOffset));
822
823 Register actual_params_size = scratch2;
824 // Compute the size of the actual parameters + receiver (in bytes).
825 __ ldr(actual_params_size,
826 MemOperand(fp, StandardFrameConstants::kArgCOffset));
827
828 // If actual is bigger than formal, then we should use it to free up the stack
829 // arguments.
830 __ cmp(params_size, actual_params_size);
831 __ mov(params_size, actual_params_size, LeaveCC, kLessThan);
832
833 // Leave the frame (also dropping the register file).
834 __ LeaveFrame(StackFrame::INTERPRETED);
835
836 // Drop receiver + arguments.
837 __ DropArguments(params_size);
838}
839
840// Advance the current bytecode offset. This simulates what all bytecode
841// handlers do upon completion of the underlying operation. Will bail out to a
842// label if the bytecode (without prefix) is a return bytecode. Will not advance
843// the bytecode offset if the current bytecode is a JumpLoop, instead just
844// re-executing the JumpLoop to jump to the correct bytecode.
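// For example, on a Wide or ExtraWide prefix the offset is first bumped past
// the one-byte prefix and the lookup switches to the matching scaled size
// table before the size of the real bytecode is added.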
845static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
846 Register bytecode_array,
847 Register bytecode_offset,
848 Register bytecode, Register scratch1,
849 Register scratch2, Label* if_return) {
850 ASM_CODE_COMMENT(masm);
851 Register bytecode_size_table = scratch1;
852
853 // The bytecode offset value will be increased by one in wide and extra wide
854 // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
855 // will restore the original bytecode. In order to simplify the code, we have
856 // a backup of it.
857 Register original_bytecode_offset = scratch2;
858 DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
859 bytecode, original_bytecode_offset));
860
861 __ Move(bytecode_size_table,
862 ExternalReference::bytecode_size_table_address());
863 __ Move(original_bytecode_offset, bytecode_offset);
864
865 // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
866 Label process_bytecode;
867 static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
868 static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
869 static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
870 static_assert(3 ==
871 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
872 __ cmp(bytecode, Operand(0x3));
873 __ b(hi, &process_bytecode);
874 __ tst(bytecode, Operand(0x1));
875 // Load the next bytecode.
876 __ add(bytecode_offset, bytecode_offset, Operand(1));
877 __ ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
878
879 // Update table to the wide scaled table.
880 __ add(bytecode_size_table, bytecode_size_table,
882 // Conditionally update table to the extra wide scaled table. We are taking
883 // advantage of the fact that the extra wide follows the wide one.
884 __ add(bytecode_size_table, bytecode_size_table,
886 ne);
887
888 __ bind(&process_bytecode);
889
890 // Bailout to the return label if this is a return bytecode.
891
892 // Create cmp, cmpne, ..., cmpne to check for a return bytecode.
893 Condition flag = al;
894#define JUMP_IF_EQUAL(NAME) \
895 __ cmp(bytecode, Operand(static_cast<int>(interpreter::Bytecode::k##NAME)), \
896 flag); \
897 flag = ne;
898 RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
899#undef JUMP_IF_EQUAL
900
901 __ b(if_return, eq);
902
903 // If this is a JumpLoop, re-execute it to perform the jump to the beginning
904 // of the loop.
905 Label end, not_jump_loop;
906 __ cmp(bytecode, Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
907 __ b(ne, &not_jump_loop);
908 // We need to restore the original bytecode_offset since we might have
909 // increased it to skip the wide / extra-wide prefix bytecode.
910 __ Move(bytecode_offset, original_bytecode_offset);
911 __ b(&end);
912
913 __ bind(&not_jump_loop);
914 // Otherwise, load the size of the current bytecode and advance the offset.
915 __ ldrb(scratch1, MemOperand(bytecode_size_table, bytecode));
916 __ add(bytecode_offset, bytecode_offset, scratch1);
917
918 __ bind(&end);
919}
920
921namespace {
922
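// Resets the SharedFunctionInfo's age counter (used by bytecode flushing) to
// zero, marking the bytecode as recently used.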
923void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi,
924 Register scratch) {
925 DCHECK(!AreAliased(sfi, scratch));
926 __ mov(scratch, Operand(0));
927 __ strh(scratch, FieldMemOperand(sfi, SharedFunctionInfo::kAgeOffset));
928}
929
930void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
931 Register scratch1, Register scratch2) {
932 __ Move(scratch1,
933 FieldMemOperand(js_function, JSFunction::kSharedFunctionInfoOffset));
934 ResetSharedFunctionInfoAge(masm, scratch1, scratch2);
935}
936
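// Clears the OSR urgency bits in the feedback vector's OSR state so that no
// on-stack replacement is requested for the new activation.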
937void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
938 Register feedback_vector, Register scratch) {
939 DCHECK(!AreAliased(feedback_vector, scratch));
940 __ ldrb(scratch,
941 FieldMemOperand(feedback_vector, FeedbackVector::kOsrStateOffset));
942 __ and_(scratch, scratch, Operand(~FeedbackVector::OsrUrgencyBits::kMask));
943 __ strb(scratch,
944 FieldMemOperand(feedback_vector, FeedbackVector::kOsrStateOffset));
945}
946
947} // namespace
948
949// static
950void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
951 UseScratchRegisterScope temps(masm);
952 // Need a few extra registers
953 temps.Include({r4, r5, r8, r9});
954
955 auto descriptor =
956 Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
957 Register closure = descriptor.GetRegisterParameter(
958 BaselineOutOfLinePrologueDescriptor::kClosure);
959 // Load the feedback cell and vector from the closure.
960 Register feedback_cell = temps.Acquire();
961 Register feedback_vector = temps.Acquire();
962 __ ldr(feedback_cell,
963 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
964 __ ldr(feedback_vector,
965 FieldMemOperand(feedback_cell, FeedbackCell::kValueOffset));
966 {
967 UseScratchRegisterScope temps(masm);
968 Register temporary = temps.Acquire();
969 __ AssertFeedbackVector(feedback_vector, temporary);
970 }
971
972#ifndef V8_ENABLE_LEAPTIERING
973 // Check the tiering state.
974 Label flags_need_processing;
975 Register flags = no_reg;
976 {
977 UseScratchRegisterScope temps(masm);
978 // flags will be used only in |flags_need_processing|
979 // and outside it can be reused.
980 flags = temps.Acquire();
981 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
982 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
983 }
984#endif // !V8_ENABLE_LEAPTIERING
985
986 {
987 UseScratchRegisterScope temps(masm);
988 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, temps.Acquire());
989 }
990
991 // Increment invocation count for the function.
992 {
993 UseScratchRegisterScope temps(masm);
994 Register invocation_count = temps.Acquire();
995 __ ldr(invocation_count,
996 FieldMemOperand(feedback_vector,
997 FeedbackVector::kInvocationCountOffset));
998 __ add(invocation_count, invocation_count, Operand(1));
999 __ str(invocation_count,
1000 FieldMemOperand(feedback_vector,
1001 FeedbackVector::kInvocationCountOffset));
1002 }
1003
1004 FrameScope frame_scope(masm, StackFrame::MANUAL);
1005 {
1006 ASM_CODE_COMMENT_STRING(masm, "Frame Setup");
1007 // Normally the first thing we'd do here is Push(lr, fp), but we already
1008 // entered the frame in BaselineCompiler::Prologue, as we had to use the
1009 // value lr before the call to this BaselineOutOfLinePrologue builtin.
1010
1011 Register callee_context = descriptor.GetRegisterParameter(
1012 BaselineOutOfLinePrologueDescriptor::kCalleeContext);
1013 Register callee_js_function = descriptor.GetRegisterParameter(
1014 BaselineOutOfLinePrologueDescriptor::kClosure);
1015 {
1016 UseScratchRegisterScope temps(masm);
1017 ResetJSFunctionAge(masm, callee_js_function, temps.Acquire(),
1018 temps.Acquire());
1019 }
1020 __ Push(callee_context, callee_js_function);
1021 DCHECK_EQ(callee_js_function, kJavaScriptCallTargetRegister);
1022 DCHECK_EQ(callee_js_function, kJSFunctionRegister);
1023
1024 Register argc = descriptor.GetRegisterParameter(
1025 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
1026 // We'll use the bytecode for both code age/OSR resetting, and pushing onto
1027 // the frame, so load it into a register.
1028 Register bytecodeArray = descriptor.GetRegisterParameter(
1029 BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
1030 __ Push(argc, bytecodeArray);
1031 if (v8_flags.debug_code) {
1032 UseScratchRegisterScope temps(masm);
1033 Register scratch = temps.Acquire();
1034 __ CompareObjectType(feedback_vector, scratch, scratch,
1035 FEEDBACK_VECTOR_TYPE);
1036 __ Assert(eq, AbortReason::kExpectedFeedbackVector);
1037 }
1038 __ Push(feedback_cell);
1039 __ Push(feedback_vector);
1040 }
1041
1042 Label call_stack_guard;
1043 Register frame_size = descriptor.GetRegisterParameter(
1044 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
1045 {
1046 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt check");
1047 // Stack check. This folds the checks for both the interrupt stack limit
1048 // check and the real stack limit into one by just checking for the
1049 // interrupt limit. The interrupt limit is either equal to the real stack
1050 // limit or tighter. By ensuring we have space until that limit after
1051 // building the frame we can quickly precheck both at once.
1052 UseScratchRegisterScope temps(masm);
1053
1054 Register sp_minus_frame_size = temps.Acquire();
1055 __ sub(sp_minus_frame_size, sp, frame_size);
1056 Register interrupt_limit = temps.Acquire();
1057 __ LoadStackLimit(interrupt_limit, StackLimitKind::kInterruptStackLimit);
1058 __ cmp(sp_minus_frame_size, interrupt_limit);
1059 __ b(&call_stack_guard, lo);
1060 }
1061
1062 // Do "fast" return to the caller pc in lr.
1063 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1064 __ Ret();
1065
1066#ifndef V8_ENABLE_LEAPTIERING
1067 __ bind(&flags_need_processing);
1068 {
1069 ASM_CODE_COMMENT_STRING(masm, "Optimized marker check");
1070 UseScratchRegisterScope temps(masm);
1071 // Ensure the flags register is not allocated again.
1072 temps.Exclude(flags);
1073
1074 // Drop the frame created by the baseline call.
1075 __ ldm(ia_w, sp, {fp, lr});
1076 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
1077 __ Trap();
1078 }
1079#endif // !V8_ENABLE_LEAPTIERING
1080
1081 __ bind(&call_stack_guard);
1082 {
1083 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt call");
1084 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1085 // Save incoming new target or generator
1086 __ Push(kJavaScriptCallNewTargetRegister);
1087 __ SmiTag(frame_size);
1088 __ Push(frame_size);
1089 __ CallRuntime(Runtime::kStackGuardWithGap);
1090 __ Pop(kJavaScriptCallNewTargetRegister);
1091 }
1092
1093 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1094 __ Ret();
1095}
1096
1097// static
1098void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
1099 // We're here because we got deopted during BaselineOutOfLinePrologue's stack
1100 // check. Undo all its frame creation and call into the interpreter instead.
1101
1102 // Drop the feedback vector, the bytecode offset (was the feedback vector but
1103 // got replaced during deopt) and bytecode array.
1104 __ Drop(3);
1105
1106 // Context, closure, argc.
1109
1110 // Drop frame pointer
1111 __ LeaveFrame(StackFrame::BASELINE);
1112
1113 // Enter the interpreter.
1114 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
1115}
1116
1117// Generate code for entering a JS function with the interpreter.
1118// On entry to the function the receiver and arguments have been pushed on the
1119// stack left to right.
1120//
1121// The live registers are:
1122// o r0: actual argument count
1123// o r1: the JS function object being called.
1124// o r3: the incoming new target or generator object
1125// o cp: our context
1126// o fp: the caller's frame pointer
1127// o sp: stack pointer
1128// o lr: return address
1129//
1130// The function builds an interpreter frame. See InterpreterFrameConstants in
1131// frame-constants.h for its layout.
1132void Builtins::Generate_InterpreterEntryTrampoline(
1133 MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
1134 Register closure = r1;
1135
1136 // Get the bytecode array from the function object and load it into
1137 // kInterpreterBytecodeArrayRegister.
1138 __ ldr(r4, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1139 ResetSharedFunctionInfoAge(masm, r4, r8);
1140
1141 // The bytecode array could have been flushed from the shared function info,
1142 // if so, call into CompileLazy.
1143 Label is_baseline, compile_lazy;
1144 GetSharedFunctionInfoBytecodeOrBaseline(masm, r4,
1145 kInterpreterBytecodeArrayRegister, r8,
1146 &is_baseline, &compile_lazy);
1147
1148 Label push_stack_frame;
1149 Register feedback_vector = r2;
1150 __ LoadFeedbackVector(feedback_vector, closure, r4, &push_stack_frame);
1151
1152#ifndef V8_JITLESS
1153#ifndef V8_ENABLE_LEAPTIERING
1154 // If feedback vector is valid, check for optimized code and update invocation
1155 // count.
1156 Register flags = r4;
1157 Label flags_need_processing;
1158 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1159 flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
1160 &flags_need_processing);
1161#endif // !V8_ENABLE_LEAPTIERING
1162
1163 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, r4);
1164
1165 // Increment invocation count for the function.
1166 __ ldr(r9, FieldMemOperand(feedback_vector,
1167 FeedbackVector::kInvocationCountOffset));
1168 __ add(r9, r9, Operand(1));
1169 __ str(r9, FieldMemOperand(feedback_vector,
1170 FeedbackVector::kInvocationCountOffset));
1171
1172 // Open a frame scope to indicate that there is a frame on the stack. The
1173 // MANUAL indicates that the scope shouldn't actually generate code to set up
1174 // the frame (that is done below).
1175#else
1176 // Note: By omitting the above code in jitless mode we also disable:
1177 // - kFlagsLogNextExecution: only used for logging/profiling; and
1178 // - kInvocationCountOffset: only used for tiering heuristics and code
1179 // coverage.
1180#endif // !V8_JITLESS
1181
1182 __ bind(&push_stack_frame);
1183 FrameScope frame_scope(masm, StackFrame::MANUAL);
1184 __ PushStandardFrame(closure);
1185
1186 // Load the initial bytecode offset.
1187 __ mov(kInterpreterBytecodeOffsetRegister,
1188 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1189
1190 // Push bytecode array and Smi tagged bytecode array offset.
1191 __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
1192 __ Push(kInterpreterBytecodeArrayRegister, r4, feedback_vector);
1193
1194 // Allocate the local and temporary register file on the stack.
1195 Label stack_overflow;
1196 {
1197 // Load frame size from the BytecodeArray object.
1198 __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1199 BytecodeArray::kFrameSizeOffset));
1200
1201 // Do a stack check to ensure we don't go over the limit.
1202 __ sub(r9, sp, Operand(r4));
1203 __ LoadStackLimit(r2, StackLimitKind::kRealStackLimit);
1204 __ cmp(r9, Operand(r2));
1205 __ b(lo, &stack_overflow);
1206
1207 // If ok, push undefined as the initial value for all register file entries.
1208 Label loop_header;
1209 Label loop_check;
1210 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1211 __ b(&loop_check, al);
1212 __ bind(&loop_header);
1213 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1214 __ push(kInterpreterAccumulatorRegister);
1215 // Continue loop if not done.
1216 __ bind(&loop_check);
1217 __ sub(r4, r4, Operand(kPointerSize), SetCC);
1218 __ b(&loop_header, ge);
1219 }
1220
1221 // If the bytecode array has a valid incoming new target or generator object
1222 // register, initialize it with incoming value which was passed in r3.
1223 __ ldr(r9, FieldMemOperand(
1224 kInterpreterBytecodeArrayRegister,
1225 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
1226 __ cmp(r9, Operand::Zero());
1227 __ str(r3, MemOperand(fp, r9, LSL, kPointerSizeLog2), ne);
1228
1229 // Perform interrupt stack check.
1230 // TODO(solanes): Merge with the real stack limit check above.
1231 Label stack_check_interrupt, after_stack_check_interrupt;
1232 __ LoadStackLimit(r4, StackLimitKind::kInterruptStackLimit);
1233 __ cmp(sp, r4);
1234 __ b(lo, &stack_check_interrupt);
1235 __ bind(&after_stack_check_interrupt);
1236
1237 // The accumulator is already loaded with undefined.
1238
1239 // Load the dispatch table into a register and dispatch to the bytecode
1240 // handler at the current bytecode offset.
1241 Label do_dispatch;
1242 __ bind(&do_dispatch);
1243 __ Move(
1245 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1248 __ ldr(
1252
1253 __ RecordComment("--- InterpreterEntryReturnPC point ---");
1254 if (mode == InterpreterEntryTrampolineMode::kDefault) {
1255 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
1256 masm->pc_offset());
1257 } else {
1258 DCHECK_EQ(mode, InterpreterEntryTrampolineMode::kForProfiling);
1259 // Both versions must be the same up to this point otherwise the builtins
1260 // will not be interchangeable.
1261 CHECK_EQ(
1262 masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
1263 masm->pc_offset());
1264 }
1265
1266 // Any returns to the entry trampoline are either due to the return bytecode
1267 // or the interpreter tail calling a builtin and then a dispatch.
1268
1269 // Get bytecode array and bytecode offset from the stack frame.
1275
1276 // Either return, or advance to the next bytecode and dispatch.
1277 Label do_return;
1282 &do_return);
1283 __ jmp(&do_dispatch);
1284
1285 __ bind(&do_return);
1286 // The return value is in r0.
1287 LeaveInterpreterFrame(masm, r2, r4);
1288 __ Jump(lr);
1289
1290 __ bind(&stack_check_interrupt);
1291 // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
1292 // for the call to the StackGuard.
1298 __ CallRuntime(Runtime::kStackGuard);
1299
1300 // After the call, restore the bytecode array, bytecode offset and accumulator
1301 // registers again. Also, restore the bytecode offset in the stack to its
1302 // previous value.
1307 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1308
1311
1312 __ jmp(&after_stack_check_interrupt);
1313
1314#ifndef V8_JITLESS
1315#ifndef V8_ENABLE_LEAPTIERING
1316 __ bind(&flags_need_processing);
1317 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
1318#endif // !V8_ENABLE_LEAPTIERING
1319
1320 __ bind(&is_baseline);
1321 {
1322#ifndef V8_ENABLE_LEAPTIERING
1323 // Load the feedback vector from the closure.
1324 __ ldr(feedback_vector,
1325 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
1326 __ ldr(feedback_vector,
1327 FieldMemOperand(feedback_vector, FeedbackCell::kValueOffset));
1328
1329 Label install_baseline_code;
1330 // Check if feedback vector is valid. If not, call prepare for baseline to
1331 // allocate it.
1332 __ ldr(r8, FieldMemOperand(feedback_vector, HeapObject::kMapOffset));
1333 __ ldrh(r8, FieldMemOperand(r8, Map::kInstanceTypeOffset));
1334 __ cmp(r8, Operand(FEEDBACK_VECTOR_TYPE));
1335 __ b(ne, &install_baseline_code);
1336
1337 // Check the tiering state.
1338 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1339 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1340
1341 // Load the baseline code into the closure.
1343 static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
1344 __ ReplaceClosureCodeWithOptimizedCode(r2, closure);
1345 __ JumpCodeObject(r2);
1346
1347 __ bind(&install_baseline_code);
1348#endif // !V8_ENABLE_LEAPTIERING
1349 __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
1350 }
1351#endif // !V8_JITLESS
1352
1353 __ bind(&compile_lazy);
1354 __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
1355
1356 __ bind(&stack_overflow);
1357 __ CallRuntime(Runtime::kThrowStackOverflow);
1358 __ bkpt(0); // Should not return.
1359}
1360
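// Pushes |num_args| arguments for the interpreter. |start_address| points at
// the argument with the highest address; the helper rewinds to the lowest
// address and then pushes the whole range with PushArray.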
1361static void GenerateInterpreterPushArgs(MacroAssembler* masm, Register num_args,
1362 Register start_address,
1363 Register scratch) {
1364 ASM_CODE_COMMENT(masm);
1365 // Find the argument with lowest address.
1366 __ sub(scratch, num_args, Operand(1));
1367 __ mov(scratch, Operand(scratch, LSL, kSystemPointerSizeLog2));
1368 __ sub(start_address, start_address, scratch);
1369 // Push the arguments.
1370 __ PushArray(start_address, num_args, scratch,
1371 MacroAssembler::PushArrayOrder::kReverse);
1372}
1373
1374// static
1375void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1376 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1377 InterpreterPushArgsMode mode) {
1379 // ----------- S t a t e -------------
1380 // -- r0 : the number of arguments
1381 // -- r2 : the address of the first argument to be pushed. Subsequent
1382 // arguments should be consecutive above this, in the same order as
1383 // they are to be pushed onto the stack.
1384 // -- r1 : the target to call (can be any Object).
1385 // -----------------------------------
1386 Label stack_overflow;
1387
1388 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1389 // The spread argument should not be pushed.
1390 __ sub(r0, r0, Operand(1));
1391 }
1392
1393 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1394 __ sub(r3, r0, Operand(kJSArgcReceiverSlots));
1395 } else {
1396 __ mov(r3, r0);
1397 }
1398
1399 __ StackOverflowCheck(r3, r4, &stack_overflow);
1400
1401 // Push the arguments. r2 and r4 will be modified.
1402 GenerateInterpreterPushArgs(masm, r3, r2, r4);
1403
1404 // Push "undefined" as the receiver arg if we need to.
1405 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1406 __ PushRoot(RootIndex::kUndefinedValue);
1407 }
1408
1409 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1410 // Pass the spread in the register r2.
1411 // r2 already points to the penultimate argument, the spread
1412 // lies in the next interpreter register.
1413 __ sub(r2, r2, Operand(kSystemPointerSize));
1414 __ ldr(r2, MemOperand(r2));
1415 }
1416
1417 // Call the target.
1418 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1419 __ TailCallBuiltin(Builtin::kCallWithSpread);
1420 } else {
1421 __ TailCallBuiltin(Builtins::Call(receiver_mode));
1422 }
1423
1424 __ bind(&stack_overflow);
1425 {
1426 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1427 // Unreachable code.
1428 __ bkpt(0);
1429 }
1430}
1431
1432// static
1433void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1434 MacroAssembler* masm, InterpreterPushArgsMode mode) {
1435 // ----------- S t a t e -------------
1436 // -- r0 : argument count
1437 // -- r3 : new target
1438 // -- r1 : constructor to call
1439 // -- r2 : allocation site feedback if available, undefined otherwise.
1440 // -- r4 : address of the first argument
1441 // -----------------------------------
1442 Label stack_overflow;
1443
1444 __ StackOverflowCheck(r0, r6, &stack_overflow);
1445
1446 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1447 // The spread argument should not be pushed.
1448 __ sub(r0, r0, Operand(1));
1449 }
1450
1451 Register argc_without_receiver = r6;
1452 __ sub(argc_without_receiver, r0, Operand(kJSArgcReceiverSlots));
1453 // Push the arguments. r4 and r5 will be modified.
1454 GenerateInterpreterPushArgs(masm, argc_without_receiver, r4, r5);
1455
1456 // Push a slot for the receiver to be constructed.
1457 __ mov(r5, Operand::Zero());
1458 __ push(r5);
1459
1460 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1461 // Pass the spread in the register r2.
1462 // r4 already points to the penultimate argument, the spread
1463 // lies in the next interpreter register.
1464 __ sub(r4, r4, Operand(kSystemPointerSize));
1465 __ ldr(r2, MemOperand(r4));
1466 } else {
1467 __ AssertUndefinedOrAllocationSite(r2, r5);
1468 }
1469
1471 __ AssertFunction(r1);
1472
1473 // Tail call to the array construct stub (still in the caller
1474 // context at this point).
1475 __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
1476 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1477 // Call the constructor with r0, r1, and r3 unmodified.
1478 __ TailCallBuiltin(Builtin::kConstructWithSpread);
1479 } else {
1480 DCHECK_EQ(mode, InterpreterPushArgsMode::kOther);
1481 // Call the constructor with r0, r1, and r3 unmodified.
1482 __ TailCallBuiltin(Builtin::kConstruct);
1483 }
1484
1485 __ bind(&stack_overflow);
1486 {
1487 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1488 // Unreachable code.
1489 __ bkpt(0);
1490 }
1491}
1492
1493// static
1494void Builtins::Generate_ConstructForwardAllArgsImpl(
1495 MacroAssembler* masm, ForwardWhichFrame which_frame) {
1496 // ----------- S t a t e -------------
1497 // -- r3 : new target
1498 // -- r1 : constructor to call
1499 // -----------------------------------
1500 Label stack_overflow;
1501
1502 // Load the frame pointer into r4.
1503 switch (which_frame) {
1504 case ForwardWhichFrame::kCurrentFrame:
1505 __ mov(r4, fp);
1506 break;
1507 case ForwardWhichFrame::kParentFrame:
1508 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1509 break;
1510 }
1511
1512 // Load the argument count into r0.
1513 __ ldr(r0, MemOperand(r4, StandardFrameConstants::kArgCOffset));
1514
1515 __ StackOverflowCheck(r0, r6, &stack_overflow);
1516
1517 // Point r4 to the base of the argument list to forward, excluding the
1518 // receiver.
1519 __ add(r4, r4,
1522
1523 // Copy arguments on the stack. r5 is a scratch register.
1524 Register argc_without_receiver = r6;
1525 __ sub(argc_without_receiver, r0, Operand(kJSArgcReceiverSlots));
1526 __ PushArray(r4, argc_without_receiver, r5);
1527
1528 // Push a slot for the receiver to be constructed.
1529 __ mov(r5, Operand::Zero());
1530 __ push(r5);
1531
1532 // Call the constructor with r0, r1, and r3 unmodified.
1533 __ TailCallBuiltin(Builtin::kConstruct);
1534
1535 __ bind(&stack_overflow);
1536 {
1537 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1538 // Unreachable code.
1539 __ bkpt(0);
1540 }
1541}
1542
1543namespace {
1544
1545void NewImplicitReceiver(MacroAssembler* masm) {
1546 // ----------- S t a t e -------------
1547 // -- r0 : argument count
1548 // -- r1 : constructor to call (checked to be a JSFunction)
1549 // -- r3 : new target
1550 //
1551 // Stack:
1552 // -- Implicit Receiver
1553 // -- [arguments without receiver]
1554 // -- Implicit Receiver
1555 // -- Context
1556 // -- FastConstructMarker
1557 // -- FramePointer
1558 // -----------------------------------
1559 Register implicit_receiver = r4;
1560
1561 // Save live registers.
1562 __ SmiTag(r0);
1563 __ Push(r0, r1, r3);
1564 __ CallBuiltin(Builtin::kFastNewObject);
1565 // Save result.
1566 __ Move(implicit_receiver, r0);
1567 // Restore live registers.
1568 __ Pop(r0, r1, r3);
1569 __ SmiUntag(r0);
1570
1571 // Patch implicit receiver (in arguments)
1572 __ str(implicit_receiver, MemOperand(sp, 0 * kPointerSize));
1573 // Patch second implicit (in construct frame)
1574 __ str(implicit_receiver,
1575 MemOperand(fp, FastConstructFrameConstants::kImplicitReceiverOffset));
1576
1577 // Restore context.
1578 __ ldr(cp, MemOperand(fp, FastConstructFrameConstants::kContextOffset));
1579}
1580
1581} // namespace
1582
1583// static
1584void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
1585 MacroAssembler* masm) {
1586 // ----------- S t a t e -------------
1587 // -- r0 : argument count
1588 // -- r1 : constructor to call (checked to be a JSFunction)
1589 // -- r3 : new target
1590 // -- r4 : address of the first argument
1591 // -- cp/r7 : context pointer
1592 // -----------------------------------
1593 __ AssertFunction(r1);
1594
1595 // Check if target has a [[Construct]] internal method.
1596 Label non_constructor;
1597 __ LoadMap(r2, r1);
1598 __ ldrb(r2, FieldMemOperand(r2, Map::kBitFieldOffset));
1599 __ tst(r2, Operand(Map::Bits1::IsConstructorBit::kMask));
1600 __ b(eq, &non_constructor);
1601
1602 // Add a stack check before pushing arguments.
1603 Label stack_overflow;
1604 __ StackOverflowCheck(r0, r2, &stack_overflow);
1605
1606 // Enter a construct frame.
1607 FrameScope scope(masm, StackFrame::MANUAL);
1608 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
1609 // Implicit receiver stored in the construct frame.
1610 __ LoadRoot(r2, RootIndex::kTheHoleValue);
1611 __ Push(cp, r2);
1612
1613 // Push arguments + implicit receiver.
1614 Register argc_without_receiver = r6;
1615 __ sub(argc_without_receiver, r0, Operand(kJSArgcReceiverSlots));
1616 // Push the arguments. r4 and r5 will be modified.
1617 GenerateInterpreterPushArgs(masm, argc_without_receiver, r4, r5);
1618 // Implicit receiver as part of the arguments (patched later if needed).
1619 __ push(r2);
1620
1621 // Check if it is a builtin call.
1622 Label builtin_call;
1623 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1624 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kFlagsOffset));
1625 __ tst(r2, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1626 __ b(ne, &builtin_call);
1627
1628 // Check if we need to create an implicit receiver.
1629 Label not_create_implicit_receiver;
1630 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r2);
1631 __ JumpIfIsInRange(
1632 r2, r2, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
1633 static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
1634 &not_create_implicit_receiver);
1635 NewImplicitReceiver(masm);
1636 __ bind(&not_create_implicit_receiver);
1637
1638 // Call the function.
1639 __ InvokeFunctionWithNewTarget(r1, r3, r0, InvokeType::kCall);
1640
1641 // ----------- S t a t e -------------
1642 // -- r0 constructor result
1643 //
1644 // Stack:
1645 // -- Implicit Receiver
1646 // -- Context
1647 // -- FastConstructMarker
1648 // -- FramePointer
1649 // -----------------------------------
1650
1651 // Store offset of return address for deoptimizer.
1652 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
1653 masm->pc_offset());
1654
1655 // If the result is an object (in the ECMA sense), we should get rid
1656 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
1657 // on page 74.
1658 Label use_receiver, do_throw, leave_and_return, check_receiver;
1659
1660 // If the result is undefined, we jump out to using the implicit receiver.
1661 __ JumpIfNotRoot(r0, RootIndex::kUndefinedValue, &check_receiver);
1662
1663 // Otherwise we do a smi check and fall through to check if the return value
1664 // is a valid receiver.
1665
1666 // Throw away the result of the constructor invocation and use the
1667 // on-stack receiver as the result.
1668 __ bind(&use_receiver);
1669 __ ldr(r0,
1670 MemOperand(fp, FastConstructFrameConstants::kImplicitReceiverOffset));
1671 __ JumpIfRoot(r0, RootIndex::kTheHoleValue, &do_throw);
1672
1673 __ bind(&leave_and_return);
1674 // Leave construct frame.
1675 __ LeaveFrame(StackFrame::CONSTRUCT);
1676 __ Jump(lr);
1677
1678 __ bind(&check_receiver);
1679 // If the result is a smi, it is *not* an object in the ECMA sense.
1680 __ JumpIfSmi(r0, &use_receiver);
1681
1682 // If the type of the result (stored in its map) is less than
1683 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
1684 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1685 __ CompareObjectType(r0, r4, r5, FIRST_JS_RECEIVER_TYPE);
1686 __ b(ge, &leave_and_return);
1687 __ b(&use_receiver);
1688
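  // Illustrative example of the semantics implemented above: for
  //   function F() { this.x = 1; return 42; }      // `new F()` yields {x: 1}
  //   function G() { this.x = 1; return {y: 2}; }  // `new G()` yields {y: 2}
  // a primitive (non-JSReceiver) return value is ignored and the implicit
  // receiver is used instead; only an object return value replaces it.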
1689 __ bind(&builtin_call);
1690 // TODO(victorgomes): Check the possibility to turn this into a tailcall.
1691 __ InvokeFunctionWithNewTarget(r1, r3, r0, InvokeType::kCall);
1692 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1693 __ Jump(lr);
1694
1695 __ bind(&do_throw);
1696 // Restore the context from the frame.
1698 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
1699 __ bkpt(0);
1700
1701 __ bind(&stack_overflow);
1702 // Throw a stack overflow exception.
1703 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1704 // Unreachable code.
1705 __ bkpt(0);
1706
1707 // Called Construct on an Object that doesn't have a [[Construct]] internal
1708 // method.
1709 __ bind(&non_constructor);
1710 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
1711}
1712
1713static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1714 // Set the return address to the correct point in the interpreter entry
1715 // trampoline.
1716 Label builtin_trampoline, trampoline_loaded;
1717 Tagged<Smi> interpreter_entry_return_pc_offset(
1718 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1719 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1720
1721 // If the SFI function_data is an InterpreterData, the function will have a
1722 // custom copy of the interpreter entry trampoline for profiling. If so,
1723 // get the custom trampoline, otherwise grab the entry address of the global
1724 // trampoline.
1726 __ ldr(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset));
1727 __ ldr(r2,
1728 FieldMemOperand(r2, SharedFunctionInfo::kTrustedFunctionDataOffset));
1729 __ CompareObjectType(r2, kInterpreterDispatchTableRegister,
1731 INTERPRETER_DATA_TYPE);
1732 __ b(ne, &builtin_trampoline);
1733
1734 __ ldr(r2,
1735 FieldMemOperand(r2, InterpreterData::kInterpreterTrampolineOffset));
1736 __ LoadCodeInstructionStart(r2, r2);
1737 __ b(&trampoline_loaded);
1738
1739 __ bind(&builtin_trampoline);
1740 __ Move(r2, ExternalReference::
1741 address_of_interpreter_entry_trampoline_instruction_start(
1742 masm->isolate()));
1743 __ ldr(r2, MemOperand(r2));
1744
1745 __ bind(&trampoline_loaded);
1746 __ add(lr, r2, Operand(interpreter_entry_return_pc_offset.value()));
1747
1748 // Initialize the dispatch table register.
1749 __ Move(
1751 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1752
1753 // Get the bytecode array pointer from the frame.
1756
1757 if (v8_flags.debug_code) {
1758 // Check function data field is actually a BytecodeArray object.
1760 __ Assert(
1761 ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1762 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
1763 BYTECODE_ARRAY_TYPE);
1764 __ Assert(
1765 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1766 }
1767
1768 // Get the target bytecode offset from the frame.
1772
1773 if (v8_flags.debug_code) {
1774 Label okay;
1777 __ b(ge, &okay);
1778 __ bkpt(0);
1779 __ bind(&okay);
1780 }
1781
1782 // Dispatch to the target bytecode.
1783 UseScratchRegisterScope temps(masm);
1784 Register scratch = temps.Acquire();
1791}
1792
1793void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
1794 // Get bytecode array and bytecode offset from the stack frame.
1800
1801 Label enter_bytecode, function_entry_bytecode;
1805 __ b(eq, &function_entry_bytecode);
1806
1807 // Load the current bytecode.
1810
1811 // Advance to the next bytecode.
1812 Label if_return;
1815 &if_return);
1816
1817 __ bind(&enter_bytecode);
1818 // Convert new bytecode offset to a Smi and save in the stackframe.
1821
1823
1824 __ bind(&function_entry_bytecode);
1825 // If the code deoptimizes during the implicit function entry stack interrupt
1826 // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
1827 // not a valid bytecode offset. Detect this case and advance to the first
1828 // actual bytecode.
1831 __ b(&enter_bytecode);
1832
1833 // We should never take the if_return path.
1834 __ bind(&if_return);
1835 __ Abort(AbortReason::kInvalidBytecodeAdvance);
1836}
1837
1838void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
1840}
1841
1842namespace {
1843void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1844 bool javascript_builtin,
1845 bool with_result) {
1846 const RegisterConfiguration* config(RegisterConfiguration::Default());
1847 int allocatable_register_count = config->num_allocatable_general_registers();
1848 UseScratchRegisterScope temps(masm);
1849 Register scratch = temps.Acquire(); // Temp register is not allocatable.
1850 if (with_result) {
1851 if (javascript_builtin) {
1852 __ mov(scratch, r0);
1853 } else {
1854 // Overwrite the hole inserted by the deoptimizer with the return value
1855 // from the LAZY deopt point.
1856 __ str(
1857 r0,
1858 MemOperand(
1859 sp, config->num_allocatable_general_registers() * kPointerSize +
1861 }
1862 }
1863 for (int i = allocatable_register_count - 1; i >= 0; --i) {
1864 int code = config->GetAllocatableGeneralCode(i);
1865 __ Pop(Register::from_code(code));
1866 if (javascript_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1868 }
1869 }
1870 if (javascript_builtin && with_result) {
1871 // Overwrite the hole inserted by the deoptimizer with the return value from
1872 // the LAZY deopt point. r0 contains the arguments count; the return value
1873 // from LAZY is always the last argument.
1874 constexpr int return_value_offset =
1877 __ add(r0, r0, Operand(return_value_offset));
1878 __ str(scratch, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1879 // Recover arguments count.
1880 __ sub(r0, r0, Operand(return_value_offset));
1881 }
1882 __ ldr(fp, MemOperand(
1884 // Load builtin index (stored as a Smi) and use it to get the builtin start
1885 // address from the builtins table.
1886 Register builtin = scratch;
1887 __ Pop(builtin);
1888 __ add(sp, sp,
1890 __ Pop(lr);
1891 __ LoadEntryFromBuiltinIndex(builtin, builtin);
1892 __ bx(builtin);
1893}
1894} // namespace
1895
1896void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1897 Generate_ContinueToBuiltinHelper(masm, false, false);
1898}
1899
1900void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1901 MacroAssembler* masm) {
1902 Generate_ContinueToBuiltinHelper(masm, false, true);
1903}
1904
1905void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1906 Generate_ContinueToBuiltinHelper(masm, true, false);
1907}
1908
1909void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1910 MacroAssembler* masm) {
1911 Generate_ContinueToBuiltinHelper(masm, true, true);
1912}
1913
1914void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1915 {
1916 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1917 __ CallRuntime(Runtime::kNotifyDeoptimized);
1918 }
1919
1921 __ pop(r0);
1922 __ Ret();
1923}
1924
1925namespace {
1926
1927void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
1928 Operand offset = Operand::Zero()) {
1929 // Compute the target address = entry_address + offset
1930 if (offset.IsImmediate() && offset.immediate() == 0) {
1931 __ mov(lr, entry_address);
1932 } else {
1933 __ add(lr, entry_address, offset);
1934 }
1935
1936 // "return" to the OSR entry point of the function.
1937 __ Ret();
1938}
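// In effect this performs an indirect jump, pc = entry_address + offset, by
// materializing the target address in lr and then returning (a sketch of the
// intended control flow, not of additional emitted instructions).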
1939
1940enum class OsrSourceTier {
1941 kInterpreter,
1942 kBaseline,
1943};
1944
1945void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
1946 Register maybe_target_code,
1947 Register expected_param_count) {
1948 Label jump_to_optimized_code;
1949 {
1950 // If maybe_target_code is not null, no need to call into runtime. A
1951 // precondition here is: if maybe_target_code is an InstructionStream
1952 // object, it must NOT be marked_for_deoptimization (callers must ensure
1953 // this).
1954 __ cmp(maybe_target_code, Operand(Smi::zero()));
1955 __ b(ne, &jump_to_optimized_code);
1956 }
1957
1958 ASM_CODE_COMMENT(masm);
1959 {
1960 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1961 __ CallRuntime(Runtime::kCompileOptimizedOSR);
1962 }
1963
1964 // If the code object is null, just return to the caller.
1965 __ cmp(r0, Operand(Smi::zero()));
1966 __ b(ne, &jump_to_optimized_code);
1967 __ Ret();
1968
1969 __ bind(&jump_to_optimized_code);
1970 DCHECK_EQ(maybe_target_code, r0); // Already in the right spot.
1971
1972 // OSR entry tracing.
1973 {
1974 Label next;
1975 __ Move(r1, ExternalReference::address_of_log_or_trace_osr());
1976 __ ldrsb(r1, MemOperand(r1));
1977 __ tst(r1, Operand(0xFF)); // Mask to the LSB.
1978 __ b(eq, &next);
1979
1980 {
1981 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1982 __ Push(r0); // Preserve the code object.
1983 __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
1984 __ Pop(r0);
1985 }
1986
1987 __ bind(&next);
1988 }
1989
1990 if (source == OsrSourceTier::kInterpreter) {
1991 // Drop the handler frame that is sitting on top of the actual
1992 // JavaScript frame. This is the case when OSR is triggered from bytecode.
1993 __ LeaveFrame(StackFrame::STUB);
1994 }
1995
1996 // The sandbox would rely on testing expected_parameter_count here.
1997 static_assert(!V8_ENABLE_SANDBOX_BOOL);
1998
1999 // Load deoptimization data from the code object.
2000 // <deopt_data> = <code>[#deoptimization_data_offset]
2001 __ ldr(r1,
2002 FieldMemOperand(r0, Code::kDeoptimizationDataOrInterpreterDataOffset));
2003
2004 __ LoadCodeInstructionStart(r0, r0);
2005
2006 {
2007 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
2008
2009 // Load the OSR entrypoint offset from the deoptimization data.
2010 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
2013
2014 Generate_OSREntry(masm, r0, Operand::SmiUntag(r1));
2015 }
2016}
2017} // namespace
2018
2019void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
2020 using D = OnStackReplacementDescriptor;
2021 static_assert(D::kParameterCount == 2);
2022 OnStackReplacement(masm, OsrSourceTier::kInterpreter,
2023 D::MaybeTargetCodeRegister(),
2024 D::ExpectedParameterCountRegister());
2025}
2026
2027void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
2028 using D = OnStackReplacementDescriptor;
2029 static_assert(D::kParameterCount == 2);
2030
2031 __ ldr(kContextRegister,
2033 OnStackReplacement(masm, OsrSourceTier::kBaseline,
2034 D::MaybeTargetCodeRegister(),
2035 D::ExpectedParameterCountRegister());
2036}
2037
2038#ifdef V8_ENABLE_MAGLEV
2039
2040void Builtins::Generate_MaglevFunctionEntryStackCheck(MacroAssembler* masm,
2041 bool save_new_target) {
2042 // Input (r0): Stack size (Smi).
2043 // This builtin can be invoked just after Maglev's prologue.
2044 // All registers are available, except (possibly) new.target.
2045 ASM_CODE_COMMENT(masm);
2046 {
2047 FrameScope scope(masm, StackFrame::INTERNAL);
2048 __ AssertSmi(r0);
2049 if (save_new_target) {
2051 }
2052 __ Push(r0);
2053 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
2054 if (save_new_target) {
2056 }
2057 }
2058 __ Ret();
2059}
2060
2061#endif // V8_ENABLE_MAGLEV
2062
2063// static
2064void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
2065 // ----------- S t a t e -------------
2066 // -- r0 : argc
2067 // -- sp[0] : receiver
2068 // -- sp[4] : thisArg
2069 // -- sp[8] : argArray
2070 // -----------------------------------
2071
2072 // 1. Load receiver into r1, argArray into r2 (if present), remove all
2073 // arguments from the stack (including the receiver), and push thisArg (if
2074 // present) instead.
2075 {
2076 __ LoadRoot(r5, RootIndex::kUndefinedValue);
2077 __ mov(r2, r5);
2078 __ ldr(r1, MemOperand(sp, 0)); // receiver
2079 __ cmp(r0, Operand(JSParameterCount(1)));
2080 __ ldr(r5, MemOperand(sp, kSystemPointerSize), ge); // thisArg
2081 __ cmp(r0, Operand(JSParameterCount(2)), ge);
2082 __ ldr(r2, MemOperand(sp, 2 * kSystemPointerSize), ge); // argArray
2083 __ DropArgumentsAndPushNewReceiver(r0, r5);
2084 }
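  // Illustrative example: a call like `f.apply(thisArg, [x, y])` enters this
  // builtin with argc == JSParameterCount(2) and the stack
  // [receiver f, thisArg, argArray]; after the block above, r1 holds f, r2
  // holds the argArray, and the stack holds only [thisArg]. Missing arguments
  // default to undefined (e.g. a plain `f.apply()`).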
2085
2086 // ----------- S t a t e -------------
2087 // -- r2 : argArray
2088 // -- r1 : receiver
2089 // -- sp[0] : thisArg
2090 // -----------------------------------
2091
2092 // 2. We don't need to check explicitly for callable receiver here,
2093 // since that's the first thing the Call/CallWithArrayLike builtins
2094 // will do.
2095
2096 // 3. Tail call with no arguments if argArray is null or undefined.
2097 Label no_arguments;
2098 __ JumpIfRoot(r2, RootIndex::kNullValue, &no_arguments);
2099 __ JumpIfRoot(r2, RootIndex::kUndefinedValue, &no_arguments);
2100
2101 // 4a. Apply the receiver to the given argArray.
2102 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2103
2104 // 4b. The argArray is either null or undefined, so we tail call without any
2105 // arguments to the receiver.
2106 __ bind(&no_arguments);
2107 {
2108 __ mov(r0, Operand(JSParameterCount(0)));
2109 __ TailCallBuiltin(Builtins::Call());
2110 }
2111}
2112
2113// static
2114void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2115 // 1. Get the callable to call (passed as receiver) from the stack.
2116 __ Pop(r1);
2117
2118 // 2. Make sure we have at least one argument.
2119 // r0: actual number of arguments
2120 {
2121 Label done;
2122 __ cmp(r0, Operand(JSParameterCount(0)));
2123 __ b(ne, &done);
2124 __ PushRoot(RootIndex::kUndefinedValue);
2125 __ add(r0, r0, Operand(1));
2126 __ bind(&done);
2127 }
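  // Illustrative example: `f.call()` with no arguments reaches this point with
  // argc == JSParameterCount(0); the pushed undefined then serves as the
  // receiver for the callable, so the call behaves like `f.call(undefined)`.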
2128
2129 // 3. Adjust the actual number of arguments.
2130 __ sub(r0, r0, Operand(1));
2131
2132 // 4. Call the callable.
2133 __ TailCallBuiltin(Builtins::Call());
2134}
2135
2136void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2137 // ----------- S t a t e -------------
2138 // -- r0 : argc
2139 // -- sp[0] : receiver
2140 // -- sp[4] : target (if argc >= 1)
2141 // -- sp[8] : thisArgument (if argc >= 2)
2142 // -- sp[12] : argumentsList (if argc == 3)
2143 // -----------------------------------
2144
2145 // 1. Load target into r1 (if present), argumentsList into r2 (if present),
2146 // remove all arguments from the stack (including the receiver), and push
2147 // thisArgument (if present) instead.
2148 {
2149 __ LoadRoot(r1, RootIndex::kUndefinedValue);
2150 __ mov(r5, r1);
2151 __ mov(r2, r1);
2152 __ cmp(r0, Operand(JSParameterCount(1)));
2153 __ ldr(r1, MemOperand(sp, kSystemPointerSize), ge); // target
2154 __ cmp(r0, Operand(JSParameterCount(2)), ge);
2155 __ ldr(r5, MemOperand(sp, 2 * kSystemPointerSize), ge); // thisArgument
2156 __ cmp(r0, Operand(JSParameterCount(3)), ge);
2157 __ ldr(r2, MemOperand(sp, 3 * kSystemPointerSize), ge); // argumentsList
2158 __ DropArgumentsAndPushNewReceiver(r0, r5);
2159 }
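  // Illustrative example: `Reflect.apply(target, thisArg, argsList)` arrives
  // with argc == JSParameterCount(3) and the stack
  // [receiver, target, thisArg, argsList]; afterwards r1 holds target, r2
  // holds argsList, and the stack holds only [thisArg]. Absent arguments
  // default to undefined.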
2160
2161 // ----------- S t a t e -------------
2162 // -- r2 : argumentsList
2163 // -- r1 : target
2164 // -- sp[0] : thisArgument
2165 // -----------------------------------
2166
2167 // 2. We don't need to check explicitly for callable target here,
2168 // since that's the first thing the Call/CallWithArrayLike builtins
2169 // will do.
2170
2171 // 3. Apply the target to the given argumentsList.
2172 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2173}
2174
2175void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2176 // ----------- S t a t e -------------
2177 // -- r0 : argc
2178 // -- sp[0] : receiver
2179 // -- sp[4] : target
2180 // -- sp[8] : argumentsList
2181 // -- sp[12] : new.target (optional)
2182 // -----------------------------------
2183
2184 // 1. Load target into r1 (if present), argumentsList into r2 (if present),
2185 // new.target into r3 (if present, otherwise use target), remove all
2186 // arguments from the stack (including the receiver), and push undefined as
2187 // the new receiver instead.
2188 {
2189 __ LoadRoot(r1, RootIndex::kUndefinedValue);
2190 __ mov(r2, r1);
2191 __ mov(r4, r1);
2192 __ cmp(r0, Operand(JSParameterCount(1)));
2193 __ ldr(r1, MemOperand(sp, kSystemPointerSize), ge); // target
2194 __ mov(r3, r1); // new.target defaults to target
2195 __ cmp(r0, Operand(JSParameterCount(2)), ge);
2196 __ ldr(r2, MemOperand(sp, 2 * kSystemPointerSize), ge); // argumentsList
2197 __ cmp(r0, Operand(JSParameterCount(3)), ge);
2198 __ ldr(r3, MemOperand(sp, 3 * kSystemPointerSize), ge); // new.target
2199 __ DropArgumentsAndPushNewReceiver(r0, r4);
2200 }
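  // Illustrative example: `Reflect.construct(target, argsList)` leaves
  // new.target (r3) defaulted to target, matching `new target(...argsList)`,
  // while `Reflect.construct(target, argsList, newTarget)` overrides r3 with
  // the explicit newTarget loaded from the stack.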
2201
2202 // ----------- S t a t e -------------
2203 // -- r2 : argumentsList
2204 // -- r3 : new.target
2205 // -- r1 : target
2206 // -- sp[0] : receiver (undefined)
2207 // -----------------------------------
2208
2209 // 2. We don't need to check explicitly for constructor target here,
2210 // since that's the first thing the Construct/ConstructWithArrayLike
2211 // builtins will do.
2212
2213 // 3. We don't need to check explicitly for constructor new.target here,
2214 // since that's the second thing the Construct/ConstructWithArrayLike
2215 // builtins will do.
2216
2217 // 4. Construct the target with the given new.target and argumentsList.
2218 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
2219}
2220
2221namespace {
2222
2223// Allocate new stack space for |count| arguments and shift all existing
2224// arguments already on the stack. |pointer_to_new_space_out| points to the
2225 // first free slot on the stack to copy additional arguments to, and
2226 // |argc_in_out| is updated to include |count|.
2227void Generate_AllocateSpaceAndShiftExistingArguments(
2228 MacroAssembler* masm, Register count, Register argc_in_out,
2229 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2230 DCHECK(!AreAliased(count, argc_in_out, pointer_to_new_space_out, scratch1,
2231 scratch2));
2232 UseScratchRegisterScope temps(masm);
2233 Register old_sp = scratch1;
2234 Register new_space = scratch2;
2235 __ mov(old_sp, sp);
2236 __ lsl(new_space, count, Operand(kSystemPointerSizeLog2));
2237 __ AllocateStackSpace(new_space);
2238
2239 Register end = scratch2;
2240 Register value = temps.Acquire();
2241 Register dest = pointer_to_new_space_out;
2242 __ mov(dest, sp);
2243 __ add(end, old_sp, Operand(argc_in_out, LSL, kSystemPointerSizeLog2));
2244 Label loop, done;
2245 __ bind(&loop);
2246 __ cmp(old_sp, end);
2247 __ b(ge, &done);
2248 __ ldr(value, MemOperand(old_sp, kSystemPointerSize, PostIndex));
2249 __ str(value, MemOperand(dest, kSystemPointerSize, PostIndex));
2250 __ b(&loop);
2251 __ bind(&done);
2252
2253 // Update total number of arguments.
2254 __ add(argc_in_out, argc_in_out, count);
2255}
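// A rough C-style sketch of the helper above (illustrative only; the
// generated code operates on the machine stack directly):
//   old_sp = sp;
//   sp = sp - count * kSystemPointerSize;              // make room below
//   for (i = 0; i < argc_in_out; i++)                  // shift existing args
//     ((intptr_t*)sp)[i] = ((intptr_t*)old_sp)[i];
//   pointer_to_new_space_out = (intptr_t*)sp + argc_in_out;  // first free slot
//   argc_in_out += count;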
2256
2257} // namespace
2258
2259// static
2260// TODO(v8:11615): Observe Code::kMaxArguments in
2261// CallOrConstructVarargs
2262void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
2263 Builtin target_builtin) {
2264 // ----------- S t a t e -------------
2265 // -- r1 : target
2266 // -- r0 : number of parameters on the stack
2267 // -- r2 : arguments list (a FixedArray)
2268 // -- r4 : len (number of elements to push from args)
2269 // -- r3 : new.target (for [[Construct]])
2270 // -----------------------------------
2271 Register scratch = r8;
2272
2273 if (v8_flags.debug_code) {
2274 // Allow r2 to be a FixedArray, or a FixedDoubleArray if r4 == 0.
2275 Label ok, fail;
2276 __ AssertNotSmi(r2);
2277 __ ldr(scratch, FieldMemOperand(r2, HeapObject::kMapOffset));
2278 __ ldrh(r6, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2279 __ cmp(r6, Operand(FIXED_ARRAY_TYPE));
2280 __ b(eq, &ok);
2281 __ cmp(r6, Operand(FIXED_DOUBLE_ARRAY_TYPE));
2282 __ b(ne, &fail);
2283 __ cmp(r4, Operand(0));
2284 __ b(eq, &ok);
2285 // Fall through.
2286 __ bind(&fail);
2287 __ Abort(AbortReason::kOperandIsNotAFixedArray);
2288
2289 __ bind(&ok);
2290 }
2291
2292 Label stack_overflow;
2293 __ StackOverflowCheck(r4, scratch, &stack_overflow);
2294
2295 // Move the arguments already in the stack,
2296 // including the receiver and the return address.
2297 // r4: Number of arguments to make room for.
2298 // r0: Number of arguments already on the stack.
2299 // r9: Points to first free slot on the stack after arguments were shifted.
2300 Generate_AllocateSpaceAndShiftExistingArguments(masm, r4, r0, r9, r5, r6);
2301
2302 // Copy arguments onto the stack (thisArgument is already on the stack).
2303 {
2304 __ mov(r6, Operand(0));
2305 __ LoadRoot(r5, RootIndex::kTheHoleValue);
2306 Label done, loop;
2307 __ bind(&loop);
2308 __ cmp(r6, r4);
2309 __ b(eq, &done);
2310 __ add(scratch, r2, Operand(r6, LSL, kTaggedSizeLog2));
2311 __ ldr(scratch, FieldMemOperand(scratch, OFFSET_OF_DATA_START(FixedArray)));
2312 __ cmp(scratch, r5);
2313 // Turn the hole into undefined as we go.
2314 __ LoadRoot(scratch, RootIndex::kUndefinedValue, eq);
2315 __ str(scratch, MemOperand(r9, kSystemPointerSize, PostIndex));
2316 __ add(r6, r6, Operand(1));
2317 __ b(&loop);
2318 __ bind(&done);
2319 }
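  // Illustrative example: spreading a holey array such as `f(...[ , 1 ])`
  // goes through this loop; the hole in the first slot is replaced by
  // undefined before it is pushed, so the callee observes f(undefined, 1).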
2320
2321 // Tail-call to the actual Call or Construct builtin.
2322 __ TailCallBuiltin(target_builtin);
2323
2324 __ bind(&stack_overflow);
2325 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2326}
2327
2328// static
2329void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2330 CallOrConstructMode mode,
2331 Builtin target_builtin) {
2332 // ----------- S t a t e -------------
2333 // -- r0 : the number of arguments
2334 // -- r3 : the new.target (for [[Construct]] calls)
2335 // -- r1 : the target to call (can be any Object)
2336 // -- r2 : start index (to support rest parameters)
2337 // -----------------------------------
2338
2339 Register scratch = r6;
2340
2341 // Check if new.target has a [[Construct]] internal method.
2342 if (mode == CallOrConstructMode::kConstruct) {
2343 Label new_target_constructor, new_target_not_constructor;
2344 __ JumpIfSmi(r3, &new_target_not_constructor);
2345 __ ldr(scratch, FieldMemOperand(r3, HeapObject::kMapOffset));
2346 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
2347 __ tst(scratch, Operand(Map::Bits1::IsConstructorBit::kMask));
2348 __ b(ne, &new_target_constructor);
2349 __ bind(&new_target_not_constructor);
2350 {
2351 FrameScope scope(masm, StackFrame::MANUAL);
2352 __ EnterFrame(StackFrame::INTERNAL);
2353 __ Push(r3);
2354 __ CallRuntime(Runtime::kThrowNotConstructor);
2355 }
2356 __ bind(&new_target_constructor);
2357 }
2358
2359 Label stack_done, stack_overflow;
2361 __ sub(r5, r5, Operand(kJSArgcReceiverSlots));
2362 __ sub(r5, r5, r2, SetCC);
2363 __ b(le, &stack_done);
2364 {
2365 // ----------- S t a t e -------------
2366 // -- r0 : the number of arguments already in the stack
2367 // -- r1 : the target to call (can be any Object)
2368 // -- r2 : start index (to support rest parameters)
2369 // -- r3 : the new.target (for [[Construct]] calls)
2370 // -- fp : point to the caller stack frame
2371 // -- r5 : number of arguments to copy, i.e. arguments count - start index
2372 // -----------------------------------
2373
2374 // Check for stack overflow.
2375 __ StackOverflowCheck(r5, scratch, &stack_overflow);
2376
2377 // Forward the arguments from the caller frame.
2378 // Point to the first argument to copy (skipping the receiver).
2379 __ add(r4, fp,
2382 __ add(r4, r4, Operand(r2, LSL, kSystemPointerSizeLog2));
2383
2384 // Move the arguments already in the stack,
2385 // including the receiver and the return address.
2386 // r5: Number of arguments to make room for.
2387 // r0: Number of arguments already on the stack.
2388 // r2: Points to first free slot on the stack after arguments were shifted.
2389 Generate_AllocateSpaceAndShiftExistingArguments(masm, r5, r0, r2, scratch,
2390 r8);
2391
2392 // Copy arguments from the caller frame.
2393 // TODO(victorgomes): Consider using forward order as potentially more cache
2394 // friendly.
2395 {
2396 Label loop;
2397 __ bind(&loop);
2398 {
2399 __ sub(r5, r5, Operand(1), SetCC);
2400 __ ldr(scratch, MemOperand(r4, r5, LSL, kSystemPointerSizeLog2));
2401 __ str(scratch, MemOperand(r2, r5, LSL, kSystemPointerSizeLog2));
2402 __ b(ne, &loop);
2403 }
2404 }
2405 }
2406 __ bind(&stack_done);
2407 // Tail-call to the actual Call or Construct builtin.
2408 __ TailCallBuiltin(target_builtin);
2409
2410 __ bind(&stack_overflow);
2411 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2412}
2413
2414// static
2415void Builtins::Generate_CallFunction(MacroAssembler* masm,
2416 ConvertReceiverMode mode) {
2417 // ----------- S t a t e -------------
2418 // -- r0 : the number of arguments
2419 // -- r1 : the function to call (checked to be a JSFunction)
2420 // -----------------------------------
2421 __ AssertCallableFunction(r1);
2422
2423 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2424
2425 // Enter the context of the function; ToObject has to run in the function
2426 // context, and we also need to take the global proxy from the function
2427 // context in case of conversion.
2428 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
2429 // We need to convert the receiver for non-native sloppy mode functions.
2430 Label done_convert;
2431 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kFlagsOffset));
2432 __ tst(r3, Operand(SharedFunctionInfo::IsNativeBit::kMask |
2433 SharedFunctionInfo::IsStrictBit::kMask));
2434 __ b(ne, &done_convert);
2435 {
2436 // ----------- S t a t e -------------
2437 // -- r0 : the number of arguments
2438 // -- r1 : the function to call (checked to be a JSFunction)
2439 // -- r2 : the shared function info.
2440 // -- cp : the function context.
2441 // -----------------------------------
2442
2444 // Patch receiver to global proxy.
2445 __ LoadGlobalProxy(r3);
2446 } else {
2447 Label convert_to_object, convert_receiver;
2448 __ ldr(r3, __ ReceiverOperand());
2449 __ JumpIfSmi(r3, &convert_to_object);
2450 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2451 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
2452 __ b(hs, &done_convert);
2454 Label convert_global_proxy;
2455 __ JumpIfRoot(r3, RootIndex::kUndefinedValue, &convert_global_proxy);
2456 __ JumpIfNotRoot(r3, RootIndex::kNullValue, &convert_to_object);
2457 __ bind(&convert_global_proxy);
2458 {
2459 // Patch receiver to global proxy.
2460 __ LoadGlobalProxy(r3);
2461 }
2462 __ b(&convert_receiver);
2463 }
2464 __ bind(&convert_to_object);
2465 {
2466 // Convert receiver using ToObject.
2467 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2468 // in the fast case? (fall back to AllocateInNewSpace?)
2469 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2470 __ SmiTag(r0);
2471 __ Push(r0, r1);
2472 __ mov(r0, r3);
2473 __ Push(cp);
2474 __ CallBuiltin(Builtin::kToObject);
2475 __ Pop(cp);
2476 __ mov(r3, r0);
2477 __ Pop(r0, r1);
2478 __ SmiUntag(r0);
2479 }
2480 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2481 __ bind(&convert_receiver);
2482 }
2483 __ str(r3, __ ReceiverOperand());
2484 }
2485 __ bind(&done_convert);
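  // Illustrative example of the conversion above: for a sloppy-mode function
  //   function f() { return this; }
  // `f.call(null)` and `f.call(undefined)` take the &convert_global_proxy path
  // and receive the global proxy, while `f.call(42)` goes through
  // Builtin::kToObject and receives a Number wrapper object as the receiver.
  // Strict-mode and native functions skip this entirely via the
  // IsNativeBit/IsStrictBit check.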
2486
2487 // ----------- S t a t e -------------
2488 // -- r0 : the number of arguments
2489 // -- r1 : the function to call (checked to be a JSFunction)
2490 // -- r2 : the shared function info.
2491 // -- cp : the function context.
2492 // -----------------------------------
2493
2494 __ ldrh(r2,
2495 FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
2496 __ InvokeFunctionCode(r1, no_reg, r2, r0, InvokeType::kJump);
2497}
2498
2499namespace {
2500
2501void Generate_PushBoundArguments(MacroAssembler* masm) {
2502 ASM_CODE_COMMENT(masm);
2503 // ----------- S t a t e -------------
2504 // -- r0 : the number of arguments
2505 // -- r1 : target (checked to be a JSBoundFunction)
2506 // -- r3 : new.target (only in case of [[Construct]])
2507 // -----------------------------------
2508
2509 // Load [[BoundArguments]] into r2 and length of that into r4.
2510 Label no_bound_arguments;
2511 __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
2512 __ ldr(r4, FieldMemOperand(r2, offsetof(FixedArray, length_)));
2513 __ SmiUntag(r4);
2514 __ cmp(r4, Operand(0));
2515 __ b(eq, &no_bound_arguments);
2516 {
2517 // ----------- S t a t e -------------
2518 // -- r0 : the number of arguments
2519 // -- r1 : target (checked to be a JSBoundFunction)
2520 // -- r2 : the [[BoundArguments]] (implemented as FixedArray)
2521 // -- r3 : new.target (only in case of [[Construct]])
2522 // -- r4 : the number of [[BoundArguments]]
2523 // -----------------------------------
2524
2525 Register scratch = r6;
2526
2527 {
2528 // Check the stack for overflow. We are not trying to catch interruptions
2529 // (i.e. debug break and preemption) here, so check the "real stack
2530 // limit".
2531 Label done;
2532 __ mov(scratch, Operand(r4, LSL, kSystemPointerSizeLog2));
2533 {
2534 UseScratchRegisterScope temps(masm);
2535 Register remaining_stack_size = temps.Acquire();
2536 DCHECK(!AreAliased(r0, r1, r2, r3, r4, scratch, remaining_stack_size));
2537
2538 // Compute the space we have left. The stack might already be overflowed
2539 // here, which will cause remaining_stack_size to become negative.
2540 __ LoadStackLimit(remaining_stack_size,
2542 __ sub(remaining_stack_size, sp, remaining_stack_size);
2543
2544 // Check if the arguments will overflow the stack.
2545 __ cmp(remaining_stack_size, scratch);
2546 }
2547 __ b(gt, &done);
2548 {
2549 FrameScope scope(masm, StackFrame::MANUAL);
2550 __ EnterFrame(StackFrame::INTERNAL);
2551 __ CallRuntime(Runtime::kThrowStackOverflow);
2552 }
2553 __ bind(&done);
2554 }
2555
2556 // Pop receiver.
2557 __ Pop(r5);
2558
2559 // Push [[BoundArguments]].
2560 {
2561 Label loop;
2562 __ add(r0, r0, r4); // Adjust effective number of arguments.
2563 __ add(r2, r2,
2564 Operand(OFFSET_OF_DATA_START(FixedArray) - kHeapObjectTag));
2565 __ bind(&loop);
2566 __ sub(r4, r4, Operand(1), SetCC);
2567 __ ldr(scratch, MemOperand(r2, r4, LSL, kTaggedSizeLog2));
2568 __ Push(scratch);
2569 __ b(gt, &loop);
2570 }
2571
2572 // Push receiver.
2573 __ Push(r5);
2574 }
2575 __ bind(&no_bound_arguments);
2576}
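// Illustrative example: for `const g = f.bind(thisArg, a, b); g(c);` the Call
// path reaches this helper with r0 == JSParameterCount(1) and the stack
// [receiver, c]; it pops the receiver, pushes the bound arguments so the
// stack becomes [receiver, a, b, c], and bumps r0 by 2 before the
// [[BoundTargetFunction]] is invoked.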
2577
2578} // namespace
2579
2580// static
2581void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2582 // ----------- S t a t e -------------
2583 // -- r0 : the number of arguments
2584 // -- r1 : the function to call (checked to be a JSBoundFunction)
2585 // -----------------------------------
2586 __ AssertBoundFunction(r1);
2587
2588 // Patch the receiver to [[BoundThis]].
2589 __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
2590 __ str(r3, __ ReceiverOperand());
2591
2592 // Push the [[BoundArguments]] onto the stack.
2593 Generate_PushBoundArguments(masm);
2594
2595 // Call the [[BoundTargetFunction]] via the Call builtin.
2596 __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
2597 __ TailCallBuiltin(Builtins::Call());
2598}
2599
2600// static
2601void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2602 // ----------- S t a t e -------------
2603 // -- r0 : the number of arguments
2604 // -- r1 : the target to call (can be any Object).
2605 // -----------------------------------
2606 Register target = r1;
2607 Register map = r4;
2608 Register instance_type = r5;
2609 Register scratch = r6;
2610 DCHECK(!AreAliased(r0, target, map, instance_type));
2611
2612 Label non_callable, class_constructor;
2613 __ JumpIfSmi(target, &non_callable);
2614 __ LoadMap(map, target);
2615 __ CompareInstanceTypeRange(map, instance_type, scratch,
2618 __ TailCallBuiltin(Builtins::CallFunction(mode), ls);
2619 __ cmp(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
2620 __ TailCallBuiltin(Builtin::kCallBoundFunction, eq);
2621
2622 // Check if target has a [[Call]] internal method.
2623 {
2624 Register flags = r4;
2625 __ ldrb(flags, FieldMemOperand(map, Map::kBitFieldOffset));
2626 map = no_reg;
2627 __ tst(flags, Operand(Map::Bits1::IsCallableBit::kMask));
2628 __ b(eq, &non_callable);
2629 }
2630
2631 // Check if target is a proxy and call CallProxy external builtin
2632 __ cmp(instance_type, Operand(JS_PROXY_TYPE));
2633 __ TailCallBuiltin(Builtin::kCallProxy, eq);
2634
2635 // Check if target is a wrapped function and call CallWrappedFunction external
2636 // builtin
2637 __ cmp(instance_type, Operand(JS_WRAPPED_FUNCTION_TYPE));
2638 __ TailCallBuiltin(Builtin::kCallWrappedFunction, eq);
2639
2640 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2641 // Check that the function is not a "classConstructor".
2642 __ cmp(instance_type, Operand(JS_CLASS_CONSTRUCTOR_TYPE));
2643 __ b(eq, &class_constructor);
2644
2645 // 2. Call to something else, which might have a [[Call]] internal method (if
2646 // not, we raise an exception).
2647 // Overwrite the original receiver with the (original) target.
2648 __ str(target, __ ReceiverOperand());
2649 // Let the "call_as_function_delegate" take care of the rest.
2650 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2651 __ TailCallBuiltin(
2653
2654 // 3. Call to something that is not callable.
2655 __ bind(&non_callable);
2656 {
2657 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2658 __ Push(target);
2659 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2660 __ Trap(); // Unreachable.
2661 }
2662
2663 // 4. The function is a "classConstructor", need to raise an exception.
2664 __ bind(&class_constructor);
2665 {
2666 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2667 __ Push(target);
2668 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2669 __ Trap(); // Unreachable.
2670 }
2671}
2672
2673// static
2674void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2675 // ----------- S t a t e -------------
2676 // -- r0 : the number of arguments
2677 // -- r1 : the constructor to call (checked to be a JSFunction)
2678 // -- r3 : the new target (checked to be a constructor)
2679 // -----------------------------------
2680 __ AssertConstructor(r1);
2681 __ AssertFunction(r1);
2682
2683 // Calling convention for function specific ConstructStubs require
2684 // r2 to contain either an AllocationSite or undefined.
2685 __ LoadRoot(r2, RootIndex::kUndefinedValue);
2686
2687 Label call_generic_stub;
2688
2689 // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2690 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2691 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
2692 __ tst(r4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2693 __ b(eq, &call_generic_stub);
2694
2695 __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);
2696
2697 __ bind(&call_generic_stub);
2698 __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
2699}
2700
2701// static
2702void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2703 // ----------- S t a t e -------------
2704 // -- r0 : the number of arguments
2705 // -- r1 : the function to call (checked to be a JSBoundFunction)
2706 // -- r3 : the new target (checked to be a constructor)
2707 // -----------------------------------
2708 __ AssertConstructor(r1);
2709 __ AssertBoundFunction(r1);
2710
2711 // Push the [[BoundArguments]] onto the stack.
2712 Generate_PushBoundArguments(masm);
2713
2714 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2715 __ cmp(r1, r3);
2716 __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
2717 eq);
2718
2719 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2720 __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
2721 __ TailCallBuiltin(Builtin::kConstruct);
2722}
2723
2724// static
2725void Builtins::Generate_Construct(MacroAssembler* masm) {
2726 // ----------- S t a t e -------------
2727 // -- r0 : the number of arguments
2728 // -- r1 : the constructor to call (can be any Object)
2729 // -- r3 : the new target (either the same as the constructor or
2730 // the JSFunction on which new was invoked initially)
2731 // -----------------------------------
2732 Register target = r1;
2733 Register map = r4;
2734 Register instance_type = r5;
2735 Register scratch = r6;
2736 DCHECK(!AreAliased(r0, target, map, instance_type, scratch));
2737
2738 // Check if target is a Smi.
2739 Label non_constructor, non_proxy;
2740 __ JumpIfSmi(target, &non_constructor);
2741
2742 // Check if target has a [[Construct]] internal method.
2743 __ ldr(map, FieldMemOperand(target, HeapObject::kMapOffset));
2744 {
2745 Register flags = r2;
2746 DCHECK(!AreAliased(r0, target, map, instance_type, flags));
2747 __ ldrb(flags, FieldMemOperand(map, Map::kBitFieldOffset));
2748 __ tst(flags, Operand(Map::Bits1::IsConstructorBit::kMask));
2749 __ b(eq, &non_constructor);
2750 }
2751
2752 // Dispatch based on instance type.
2753 __ CompareInstanceTypeRange(map, instance_type, scratch,
2754 FIRST_JS_FUNCTION_TYPE, LAST_JS_FUNCTION_TYPE);
2755 __ TailCallBuiltin(Builtin::kConstructFunction, ls);
2756
2757 // Only dispatch to bound functions after checking whether they are
2758 // constructors.
2759 __ cmp(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
2760 __ TailCallBuiltin(Builtin::kConstructBoundFunction, eq);
2761
2762 // Only dispatch to proxies after checking whether they are constructors.
2763 __ cmp(instance_type, Operand(JS_PROXY_TYPE));
2764 __ b(ne, &non_proxy);
2765 __ TailCallBuiltin(Builtin::kConstructProxy);
2766
2767 // Called Construct on an exotic Object with a [[Construct]] internal method.
2768 __ bind(&non_proxy);
2769 {
2770 // Overwrite the original receiver with the (original) target.
2771 __ str(target, __ ReceiverOperand());
2772 // Let the "call_as_constructor_delegate" take care of the rest.
2773 __ LoadNativeContextSlot(target,
2774 Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
2775 __ TailCallBuiltin(Builtins::CallFunction());
2776 }
2777
2778 // Called Construct on an Object that doesn't have a [[Construct]] internal
2779 // method.
2780 __ bind(&non_constructor);
2781 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
2782}
2783
2784#if V8_ENABLE_WEBASSEMBLY
2785
2786struct SaveWasmParamsScope {
2787 explicit SaveWasmParamsScope(MacroAssembler* masm)
2788 : lowest_fp_reg(std::begin(wasm::kFpParamRegisters)[0]),
2789 highest_fp_reg(std::end(wasm::kFpParamRegisters)[-1]),
2790 masm(masm) {
2791 for (Register gp_param_reg : wasm::kGpParamRegisters) {
2792 gp_regs.set(gp_param_reg);
2793 }
2794 gp_regs.set(lr);
2795 for (DwVfpRegister fp_param_reg : wasm::kFpParamRegisters) {
2796 CHECK(fp_param_reg.code() >= lowest_fp_reg.code() &&
2797 fp_param_reg.code() <= highest_fp_reg.code());
2798 }
2799
2800 CHECK_EQ(gp_regs.Count(), arraysize(wasm::kGpParamRegisters) + 1);
2801 CHECK_EQ(highest_fp_reg.code() - lowest_fp_reg.code() + 1,
2802 arraysize(wasm::kFpParamRegisters));
2803 CHECK_EQ(gp_regs.Count(),
2804 WasmLiftoffSetupFrameConstants::kNumberOfSavedGpParamRegs +
2805 1 /* instance */ + 1 /* lr */);
2806 CHECK_EQ(highest_fp_reg.code() - lowest_fp_reg.code() + 1,
2807 WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs);
2808
2809 __ stm(db_w, sp, gp_regs);
2810 __ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);
2811 }
2812 ~SaveWasmParamsScope() {
2813 __ vldm(ia_w, sp, lowest_fp_reg, highest_fp_reg);
2814 __ ldm(ia_w, sp, gp_regs);
2815 }
2816
2817 RegList gp_regs;
2818 DwVfpRegister lowest_fp_reg;
2819 DwVfpRegister highest_fp_reg;
2820 MacroAssembler* masm;
2821};
2822
2823// This builtin creates the following stack frame:
2824//
2825// [ feedback vector ] <-- sp // Added by this builtin.
2826// [ Wasm instance data ] // Added by this builtin.
2827// [ WASM frame marker ] // Already there on entry.
2828// [ saved fp ] <-- fp // Already there on entry.
2829void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
2830 Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
2831 Register vector = r5;
2832 Register scratch = r7;
2833 Label allocate_vector, done;
2834
2835 __ ldr(vector,
2837 WasmTrustedInstanceData::kFeedbackVectorsOffset));
2838 __ add(vector, vector, Operand(func_index, LSL, kTaggedSizeLog2));
2839 __ ldr(vector, FieldMemOperand(vector, OFFSET_OF_DATA_START(FixedArray)));
2840 __ JumpIfSmi(vector, &allocate_vector);
2841 __ bind(&done);
2843 __ push(vector);
2844 __ Ret();
2845
2846 __ bind(&allocate_vector);
2847
2848 // Feedback vector doesn't exist yet. Call the runtime to allocate it.
2849 // We temporarily change the frame type for this, because we need special
2850 // handling by the stack walker in case of GC.
2851 __ mov(scratch,
2852 Operand(StackFrame::TypeToMarker(StackFrame::WASM_LIFTOFF_SETUP)));
2853 __ str(scratch, MemOperand(sp));
2854 {
2855 SaveWasmParamsScope save_params(masm);
2856 // Arguments to the runtime function: instance data, func_index.
2858 __ SmiTag(func_index);
2859 __ push(func_index);
2860 // Allocate a stack slot where the runtime function can spill a pointer
2861 // to the {NativeModule}.
2862 __ push(r8);
2863 __ Move(cp, Smi::zero());
2864 __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
2865 __ mov(vector, kReturnRegister0);
2866 // Saved parameters are restored at the end of this block.
2867 }
2868 __ mov(scratch, Operand(StackFrame::TypeToMarker(StackFrame::WASM)));
2869 __ str(scratch, MemOperand(sp));
2870 __ b(&done);
2871}
2872
2873void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2874 // The function index was put in a register by the jump table trampoline.
2875 // Convert to Smi for the runtime call.
2877 {
2878 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2879 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2880
2881 {
2882 SaveWasmParamsScope save_params(masm);
2883
2884 // Push the instance data as an explicit argument to the runtime function.
2886 // Push the function index as second argument.
2888 // Initialize the JavaScript context with 0. CEntry will use it to
2889 // set the current context on the isolate.
2890 __ Move(cp, Smi::zero());
2891 __ CallRuntime(Runtime::kWasmCompileLazy, 2);
2892 // The runtime function returns the jump table slot offset as a Smi. Use
2893 // that to compute the jump target in r8.
2895
2896 // Saved parameters are restored at the end of this block.
2897 }
2898
2899 // After the instance data register has been restored, we can add the jump
2900 // table start to the jump table offset already stored in r8.
2902 WasmTrustedInstanceData::kJumpTableStartOffset));
2903 __ add(r8, r8, r9);
2904 }
2905
2906 // Finally, jump to the jump table slot for the function.
2907 __ Jump(r8);
2908}
2909
2910void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
2911 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2912 {
2913 FrameAndConstantPoolScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
2914
2915 static_assert(DwVfpRegister::kNumRegisters == 32);
2916 constexpr DwVfpRegister last =
2918 constexpr DwVfpRegister first =
2920 static_assert(
2922 last.code() - first.code() + 1,
2923 "All registers in the range from first to last have to be set");
2924
2925 // Save all parameter registers. They might hold live values, we restore
2926 // them after the runtime call.
2927 constexpr DwVfpRegister lowest_fp_reg = first;
2928 constexpr DwVfpRegister highest_fp_reg = last;
2929
2930 // Store gp parameter registers.
2932 // Store fp parameter registers.
2933 __ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);
2934
2935 // Initialize the JavaScript context with 0. CEntry will use it to
2936 // set the current context on the isolate.
2937 __ Move(cp, Smi::zero());
2938 __ CallRuntime(Runtime::kWasmDebugBreak, 0);
2939
2940 // Restore registers.
2941 __ vldm(ia_w, sp, lowest_fp_reg, highest_fp_reg);
2943 }
2944 __ Ret();
2945}
2946
2947namespace {
2948// Check that the stack was in the old state (if generated code assertions are
2949// enabled), and switch to the new state.
2950void SwitchStackState(MacroAssembler* masm, Register stack, Register tmp,
2952 wasm::JumpBuffer::StackState new_state) {
2953 __ ldr(tmp, MemOperand(stack, wasm::kStackStateOffset));
2954 Label ok;
2955 __ JumpIfEqual(tmp, old_state, &ok);
2956 __ Trap();
2957 __ bind(&ok);
2958 __ mov(tmp, Operand(new_state));
2959 __ str(tmp, MemOperand(stack, wasm::kStackStateOffset));
2960}
2961
2962// Switch the stack pointer.
2963void SwitchStackPointer(MacroAssembler* masm, Register stack) {
2964 __ ldr(sp, MemOperand(stack, wasm::kStackSpOffset));
2965}
2966
2967void FillJumpBuffer(MacroAssembler* masm, Register stack, Label* target,
2968 Register tmp) {
2969 __ mov(tmp, sp);
2970 __ str(tmp, MemOperand(stack, wasm::kStackSpOffset));
2971 __ str(fp, MemOperand(stack, wasm::kStackFpOffset));
2972 __ LoadStackLimit(tmp, StackLimitKind::kRealStackLimit);
2973 __ str(tmp, MemOperand(stack, wasm::kStackLimitOffset));
2974
2975 __ GetLabelAddress(tmp, target);
2976 // Stash the address in the jump buffer.
2977 __ str(tmp, MemOperand(stack, wasm::kStackPcOffset));
2978}
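// The per-stack jump buffer written above can be pictured as (illustrative
// layout only; the real offsets are the wasm::kStack*Offset constants):
//   struct JumpBuffer { void* sp; void* fp; void* stack_limit; void* pc;
//                       StackState state; };
// FillJumpBuffer records the current sp/fp/limit and the address of the
// resume label so that LoadJumpBuffer can later switch back to this stack.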
2979
2980void LoadJumpBuffer(MacroAssembler* masm, Register stack, bool load_pc,
2981 Register tmp, wasm::JumpBuffer::StackState expected_state) {
2982 SwitchStackPointer(masm, stack);
2983 __ ldr(fp, MemOperand(stack, wasm::kStackFpOffset));
2984 SwitchStackState(masm, stack, tmp, expected_state, wasm::JumpBuffer::Active);
2985 if (load_pc) {
2986 __ ldr(tmp, MemOperand(stack, wasm::kStackPcOffset));
2987 __ bx(tmp);
2988 }
2989 // The stack limit in StackGuard is set separately under the ExecutionAccess
2990 // lock.
2991}
2992
2993void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
2994 Register tmp,
2995 wasm::JumpBuffer::StackState expected_state) {
2996 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
2997 // Switch stack!
2998 LoadJumpBuffer(masm, target_stack, false, tmp, expected_state);
2999}
3000
3001// Updates the stack limit and central stack info, and validates the switch.
3002void SwitchStacks(MacroAssembler* masm, Register old_stack, bool return_switch,
3003 const std::initializer_list<Register> keep) {
3004 using ER = ExternalReference;
3005
3006 for (auto reg : keep) {
3007 __ Push(reg);
3008 }
3009
3010 {
3011 __ PrepareCallCFunction(2);
3012 FrameScope scope(masm, StackFrame::MANUAL);
3013 // Move {old_stack} first in case it aliases kCArgRegs[0].
3014 __ Move(kCArgRegs[1], old_stack);
3015 __ Move(kCArgRegs[0], ExternalReference::isolate_address(masm->isolate()));
3016 __ CallCFunction(
3017 return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
3018 }
3019
3020 for (auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
3021 __ Pop(*it);
3022 }
3023}
3024
3025void ReloadParentStack(MacroAssembler* masm, Register return_reg,
3026 Register return_value, Register context, Register tmp1,
3027 Register tmp2, Register tmp3) {
3028 Register active_stack = tmp1;
3029 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3030
3031 // Set a null pointer in the jump buffer's SP slot to indicate to the stack
3032 // frame iterator that this stack is empty.
3033 __ Zero(MemOperand(active_stack, wasm::kStackSpOffset));
3034 {
3035 UseScratchRegisterScope temps(masm);
3036 Register scratch = temps.Acquire();
3037 SwitchStackState(masm, active_stack, scratch, wasm::JumpBuffer::Active,
3039 }
3040 Register parent = tmp2;
3041 __ ldr(parent, MemOperand(active_stack, wasm::kStackParentOffset));
3042
3043 // Update active stack.
3044 __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
3045
3046 // Switch stack!
3047 SwitchStacks(masm, active_stack, true,
3048 {return_reg, return_value, context, parent});
3049 LoadJumpBuffer(masm, parent, false, tmp3, wasm::JumpBuffer::Inactive);
3050}
3051
3052void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
3053 Register suspender = tmp1;
3054 __ LoadRoot(suspender, RootIndex::kActiveSuspender);
3055 __ LoadTaggedField(
3056 suspender,
3057 FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
3058
3059 int32_t active_suspender_offset =
3061 RootIndex::kActiveSuspender);
3062 __ str(suspender, MemOperand(kRootRegister, active_suspender_offset));
3063}
3064
3065void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
3066 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset),
3067 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3068}
3069
3070// TODO(irezvov): Consolidate with arm64 RegisterAllocator.
3071class RegisterAllocator {
3072 public:
3073 class Scoped {
3074 public:
3075 Scoped(RegisterAllocator* allocator, Register* reg)
3076 : allocator_(allocator), reg_(reg) {}
3077 ~Scoped() { allocator_->Free(reg_); }
3078
3079 private:
3080 RegisterAllocator* allocator_;
3081 Register* reg_;
3082 };
3083
3084 explicit RegisterAllocator(const RegList& registers)
3086 void Ask(Register* reg) {
3087 DCHECK_EQ(*reg, no_reg);
3088 DCHECK(!available_.is_empty());
3089 *reg = available_.PopFirst();
3090 allocated_registers_.push_back(reg);
3091 }
3092
3093 bool registerIsAvailable(const Register& reg) { return available_.has(reg); }
3094
3095 void Pinned(const Register& requested, Register* reg) {
3096 DCHECK(registerIsAvailable(requested));
3097 *reg = requested;
3098 Reserve(requested);
3099 allocated_registers_.push_back(reg);
3100 }
3101
3102 void Free(Register* reg) {
3103 DCHECK_NE(*reg, no_reg);
3104 available_.set(*reg);
3105 *reg = no_reg;
3107 find(allocated_registers_.begin(), allocated_registers_.end(), reg));
3108 }
3109
3110 void Reserve(const Register& reg) {
3111 if (reg == no_reg) {
3112 return;
3113 }
3114 DCHECK(registerIsAvailable(reg));
3115 available_.clear(reg);
3116 }
3117
3118 void Reserve(const Register& reg1, const Register& reg2,
3119 const Register& reg3 = no_reg, const Register& reg4 = no_reg,
3120 const Register& reg5 = no_reg, const Register& reg6 = no_reg) {
3121 Reserve(reg1);
3122 Reserve(reg2);
3123 Reserve(reg3);
3124 Reserve(reg4);
3125 Reserve(reg5);
3126 Reserve(reg6);
3127 }
3128
3129 bool IsUsed(const Register& reg) {
3130 return initial_.has(reg) && !registerIsAvailable(reg);
3131 }
3132
3133 void ResetExcept(const Register& reg1 = no_reg, const Register& reg2 = no_reg,
3134 const Register& reg3 = no_reg, const Register& reg4 = no_reg,
3135 const Register& reg5 = no_reg,
3136 const Register& reg6 = no_reg) {
3138 available_.clear(reg1);
3139 available_.clear(reg2);
3140 available_.clear(reg3);
3141 available_.clear(reg4);
3142 available_.clear(reg5);
3143 available_.clear(reg6);
3144
3145 auto it = allocated_registers_.begin();
3146 while (it != allocated_registers_.end()) {
3147 if (registerIsAvailable(**it)) {
3148 **it = no_reg;
3149 it = allocated_registers_.erase(it);
3150 } else {
3151 it++;
3152 }
3153 }
3154 }
3155
3156 static RegisterAllocator WithAllocatableGeneralRegisters() {
3157 RegList list;
3158 const RegisterConfiguration* config(RegisterConfiguration::Default());
3159
3160 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
3161 int code = config->GetAllocatableGeneralCode(i);
3162 Register candidate = Register::from_code(code);
3163 list.set(candidate);
3164 }
3165 return RegisterAllocator(list);
3166 }
3167
3168 private:
3169 std::vector<Register*> allocated_registers_;
3170 const RegList initial_;
3172};
3173
3174#define DEFINE_REG(Name) \
3175 Register Name = no_reg; \
3176 regs.Ask(&Name);
3177
3178#define DEFINE_REG_W(Name) \
3179 DEFINE_REG(Name); \
3180 Name = Name.W();
3181
3182#define ASSIGN_REG(Name) regs.Ask(&Name);
3183
3184#define ASSIGN_REG_W(Name) \
3185 ASSIGN_REG(Name); \
3186 Name = Name.W();
3187
3188#define DEFINE_PINNED(Name, Reg) \
3189 Register Name = no_reg; \
3190 regs.Pinned(Reg, &Name);
3191
3192#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);
3193
3194#define DEFINE_SCOPED(Name) \
3195 DEFINE_REG(Name) \
3196 RegisterAllocator::Scoped scope_##Name(&regs, &Name);
3197
3198#define FREE_REG(Name) regs.Free(&Name);
3199
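// Typical use of these macros in the stack-switching builtins below
// (illustrative):
//   auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
//   DEFINE_PINNED(suspender, r0);  // claim a specific register
//   DEFINE_REG(scratch);           // claim any currently free register
//   ...
//   FREE_REG(scratch);             // hand the register back to the pool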
3200// Loads the context field of the WasmTrustedInstanceData or WasmImportData
3201// depending on the data's type, and places the result in the input register.
3202void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
3203 Register scratch) {
3204 __ LoadTaggedField(scratch, FieldMemOperand(data, HeapObject::kMapOffset));
3205 __ CompareInstanceType(scratch, scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);
3206 Label instance;
3207 Label end;
3208 __ b(eq, &instance);
3209 __ LoadTaggedField(
3210 data, FieldMemOperand(data, WasmImportData::kNativeContextOffset));
3211 __ jmp(&end);
3212 __ bind(&instance);
3213 __ LoadTaggedField(
3214 data,
3215 FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
3216 __ bind(&end);
3217}
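// Conceptually (illustrative sketch of the dispatch above):
//   data = IsWasmTrustedInstanceData(data)
//              ? native_context of the WasmTrustedInstanceData
//              : native_context of the WasmImportData;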
3218
3219} // namespace
3220
3221void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3222 // Push registers in reverse order so that they are on the stack like
3223 // in an array, with the first item being at the lowest address.
3224 for (int i = static_cast<int>(arraysize(wasm::kFpParamRegisters)) - 1; i >= 0;
3225 --i) {
3227 }
3228
3229 // r6 is pushed for alignment, so that the pushed register parameters and
3230 // stack parameters look the same as the layout produced by the js-to-wasm
3231 // wrapper for outgoing parameters. Having the same layout allows sharing
3232 // code in Torque, especially the `LocationAllocator`. r6 has been picked
3233 // arbitrarily.
3236 // Reserve a slot for the signature.
3237 __ Push(r0);
3238 __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
3239}
3240
3241void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
3242 __ Trap();
3243}
3244
3245void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3246 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3247 // Set up the stackframe.
3248 __ EnterFrame(StackFrame::STACK_SWITCH);
3249
3250 DEFINE_PINNED(suspender, r0);
3252
3253 __ sub(
3254 sp, sp,
3255 Operand(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize));
3256 // Set a sentinel value for the spill slots visited by the GC.
3257 ResetStackSwitchFrameStackSlots(masm);
3258
3259 // -------------------------------------------
3260 // Save current state in active jump buffer.
3261 // -------------------------------------------
3262 Label resume;
3263 DEFINE_REG(stack);
3264 __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3265 DEFINE_REG(scratch);
3266 FillJumpBuffer(masm, stack, &resume, scratch);
3267 SwitchStackState(masm, stack, scratch, wasm::JumpBuffer::Active,
3269 regs.ResetExcept(suspender, stack);
3270
3271 DEFINE_REG(suspender_stack);
3272 __ ldr(suspender_stack,
3273 FieldMemOperand(suspender, WasmSuspenderObject::kStackOffset));
3274 if (v8_flags.debug_code) {
3275 // -------------------------------------------
3276 // Check that the suspender's stack is the active stack.
3277 // -------------------------------------------
3278 // TODO(thibaudm): Once we add core stack-switching instructions, this
3279 // check will not hold anymore: it's possible that the active stack
3280 // changed (due to an internal switch), so we have to update the suspender.
3281 __ cmp(suspender_stack, stack);
3282 Label ok;
3283 __ b(&ok, eq);
3284 __ Trap();
3285 __ bind(&ok);
3286 }
3287 // -------------------------------------------
3288 // Update roots.
3289 // -------------------------------------------
3290 DEFINE_REG(caller);
3291 __ ldr(caller, MemOperand(suspender_stack, wasm::kStackParentOffset));
3292 __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3293 DEFINE_REG(parent);
3294 __ LoadTaggedField(
3295 parent, FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
3296 int32_t active_suspender_offset =
3298 RootIndex::kActiveSuspender);
3299 __ str(parent, MemOperand(kRootRegister, active_suspender_offset));
3300 regs.ResetExcept(suspender, caller, stack);
3301
3302 // -------------------------------------------
3303 // Load jump buffer.
3304 // -------------------------------------------
3305 SwitchStacks(masm, stack, false, {caller, suspender});
3306 FREE_REG(stack);
3307 __ LoadTaggedField(
3309 FieldMemOperand(suspender, WasmSuspenderObject::kPromiseOffset));
3310 MemOperand GCScanSlotPlace =
3311 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3312 __ Zero(GCScanSlotPlace);
3313 ASSIGN_REG(scratch)
3314 LoadJumpBuffer(masm, caller, true, scratch, wasm::JumpBuffer::Inactive);
3315 if (v8_flags.debug_code) {
3316 __ Trap();
3317 }
3318 __ bind(&resume);
3319 __ LeaveFrame(StackFrame::STACK_SWITCH);
3320 __ Jump(lr);
3321}
3322
3323namespace {
3324// Resume the suspender stored in the closure. We generate two variants of this
3325// builtin: the onFulfilled variant resumes execution at the saved PC and
3326 // forwards the value; the onRejected variant throws the value.
3327
3328void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
3329 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3330 __ EnterFrame(StackFrame::STACK_SWITCH);
3331
3332 DEFINE_PINNED(closure, kJSFunctionRegister); // r1
3333
3334 __ sub(
3335 sp, sp,
3336 Operand(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize));
3337 // Set a sentinel value for the spill slots visited by the GC.
3338 ResetStackSwitchFrameStackSlots(masm);
3339
3340 regs.ResetExcept(closure);
3341
3342 // -------------------------------------------
3343 // Load suspender from closure.
3344 // -------------------------------------------
3345 DEFINE_REG(sfi);
3346 __ LoadTaggedField(
3347 sfi,
3348 MemOperand(
3349 closure,
3350 wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction()));
3351 FREE_REG(closure);
3352 // The suspender needs to be in WriteBarrierDescriptor::ObjectRegister so it
3353 // can be used in the RecordWriteField calls later.
3354 DEFINE_PINNED(suspender, WriteBarrierDescriptor::ObjectRegister());
3355 DEFINE_REG(resume_data);
3356 __ LoadTaggedField(
3357 resume_data,
3358 FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
3359 __ LoadTaggedField(
3360 suspender,
3361 FieldMemOperand(resume_data, WasmResumeData::kSuspenderOffset));
3362 regs.ResetExcept(suspender);
3363
3364 // -------------------------------------------
3365 // Save current state.
3366 // -------------------------------------------
3367 Label suspend;
3368 DEFINE_REG(active_stack);
3369 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3370 DEFINE_REG(scratch);
3371 FillJumpBuffer(masm, active_stack, &suspend, scratch);
3372 SwitchStackState(masm, active_stack, scratch, wasm::JumpBuffer::Active,
3373 wasm::JumpBuffer::Inactive);
3374
3375 // -------------------------------------------
3376 // Set the suspender and stack parents and update the roots
3377 // -------------------------------------------
3378 DEFINE_REG(active_suspender);
3379 __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
3380 __ StoreTaggedField(
3381 active_suspender,
3382 FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
3383 __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
3384 active_suspender, kLRHasBeenSaved,
3385 SaveFPRegsMode::kIgnore);
3386 int32_t active_suspender_offset =
3387 MacroAssembler::RootRegisterOffsetForRootIndex(
3388 RootIndex::kActiveSuspender);
3389 __ str(suspender, MemOperand(kRootRegister, active_suspender_offset));
3390
3391 // On the next line we load a field from suspender, but we have to reuse the
3392 // same register for target_stack so it can be passed to RecordWriteField.
3393 // So free suspender here to reuse the pinned reg, but still load from it below.
3394 FREE_REG(suspender);
3395 DEFINE_REG(target_stack);
3396 suspender = target_stack;
3397 __ ldr(target_stack,
3398 FieldMemOperand(suspender, WasmSuspenderObject::kStackOffset));
3399 suspender = no_reg;
3400
3401 __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
3402 SwitchStacks(masm, active_stack, false, {target_stack});
3403 regs.ResetExcept(target_stack);
3404
3405 // -------------------------------------------
3406 // Load state from target jmpbuf (longjmp).
3407 // -------------------------------------------
3408 regs.Reserve(kReturnRegister0);
3409 ASSIGN_REG(scratch);
3410 // Move resolved value to return register.
3412 MemOperand GCScanSlotPlace =
3413 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3414 __ Zero(GCScanSlotPlace);
3415 if (on_resume == wasm::OnResume::kThrow) {
3416 // Switch without restoring the PC.
3417 LoadJumpBuffer(masm, target_stack, false, scratch,
3418 wasm::JumpBuffer::Suspended);
3419 // Pop this frame now. The unwinder expects that the first STACK_SWITCH
3420 // frame is the outermost one.
3421 __ LeaveFrame(StackFrame::STACK_SWITCH);
3422 // Forward the onRejected value to kThrow.
3423 __ Push(kReturnRegister0);
3424 __ CallRuntime(Runtime::kThrow);
3425 } else {
3426 // Resume the stack normally.
3427 LoadJumpBuffer(masm, target_stack, true, scratch,
3428 wasm::JumpBuffer::Suspended);
3429 }
3430 if (v8_flags.debug_code) {
3431 __ Trap();
3432 }
3433 __ bind(&suspend);
3434 __ LeaveFrame(StackFrame::STACK_SWITCH);
3435 // Pop receiver + parameter.
3436 __ add(sp, sp, Operand(2 * kSystemPointerSize));
3437 __ Jump(lr);
3438}
3439} // namespace
3440
3441void Builtins::Generate_WasmResume(MacroAssembler* masm) {
3442 Generate_WasmResumeHelper(masm, wasm::OnResume::kContinue);
3443}
3444
3445void Builtins::Generate_WasmReject(MacroAssembler* masm) {
3446 Generate_WasmResumeHelper(masm, wasm::OnResume::kThrow);
3447}
3448
3449void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
3450 // Only needed on x64.
3451 __ Trap();
3452}
3453
3454namespace {
3455void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
3456 Register wasm_instance, Register wrapper_buffer,
3457 Register& original_fp, Register& new_wrapper_buffer,
3458 Label* suspend) {
3459 ResetStackSwitchFrameStackSlots(masm);
3460 DEFINE_SCOPED(scratch)
3461 DEFINE_REG(target_stack)
3462 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3463 DEFINE_REG(parent_stack)
3464 __ ldr(parent_stack, MemOperand(target_stack, wasm::kStackParentOffset));
3465
3466 FillJumpBuffer(masm, parent_stack, suspend, scratch);
3467 SwitchStacks(masm, parent_stack, false, {wasm_instance, wrapper_buffer});
3468
3469 FREE_REG(parent_stack);
3470 // Save the old stack's fp in r9, and use it to access the parameters in
3471 // the parent frame.
3472 regs.Pinned(r9, &original_fp);
3473 __ Move(original_fp, fp);
3474 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3475 LoadTargetJumpBuffer(masm, target_stack, scratch,
3477 FREE_REG(target_stack);
3478
3479 // Push the loaded fp. We know it is null, because there is no frame yet,
3480 // so we could also push 0 directly. In any case we need to push it,
3481 // because this marks the base of the stack segment for
3482 // the stack frame iterator.
3483 __ EnterFrame(StackFrame::STACK_SWITCH);
3484
3485 int stack_space =
3486 RoundUp(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize +
3487 JSToWasmWrapperFrameConstants::kWrapperBufferSize,
3488 16);
3489 __ sub(sp, sp, Operand(stack_space));
3490 __ EnforceStackAlignment();
3491
3492 ASSIGN_REG(new_wrapper_buffer)
3493
3494 __ Move(new_wrapper_buffer, sp);
3495 // Copy data needed for return handling from old wrapper buffer to new one.
3496
3497 __ ldr(scratch,
3498 MemOperand(wrapper_buffer,
3499 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3500 __ str(scratch,
3501 MemOperand(new_wrapper_buffer,
3502 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3503 __ ldr(
3504 scratch,
3505 MemOperand(wrapper_buffer,
3506 JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount));
3507 __ str(
3508 scratch,
3509 MemOperand(new_wrapper_buffer,
3510 JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount));
3511 __ ldr(
3512 scratch,
3513 MemOperand(
3514 wrapper_buffer,
3515 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3516 __ str(
3517 scratch,
3518 MemOperand(
3519 new_wrapper_buffer,
3520 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3521
3522 __ ldr(
3523 scratch,
3524 MemOperand(
3525 wrapper_buffer,
3526 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray +
3527 4));
3528 __ str(
3529 scratch,
3530 MemOperand(
3531 new_wrapper_buffer,
3532 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray +
3533 4));
3534}
3535
3536void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
3537 wasm::Promise mode, Label* return_promise) {
3538 regs.ResetExcept();
3539 // The return value of the wasm function becomes the parameter of the
3540 // FulfillPromise builtin, and the promise is the return value of this
3541 // wrapper.
3542 static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3543 DEFINE_PINNED(promise, desc.GetRegisterParameter(0));
3544 DEFINE_PINNED(return_value, desc.GetRegisterParameter(1));
3545 DEFINE_SCOPED(tmp);
3546 DEFINE_SCOPED(tmp2);
3547 DEFINE_SCOPED(tmp3);
3548 if (mode == wasm::kPromise) {
3549 __ Move(return_value, kReturnRegister0);
3550 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3551 __ LoadTaggedField(
3552 promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
3553 }
3554 __ ldr(kContextRegister,
3555 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3556 GetContextFromImplicitArg(masm, kContextRegister, tmp);
3557
3558 ReloadParentStack(masm, promise, return_value, kContextRegister, tmp, tmp2,
3559 tmp3);
3560 RestoreParentSuspender(masm, tmp);
3561
3562 if (mode == wasm::kPromise) {
3563 __ Move(tmp, Operand(1));
3564 __ str(tmp,
3565 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3566 __ Push(promise);
3567 __ CallBuiltin(Builtin::kFulfillPromise);
3568 __ Pop(promise);
3569 }
3570 FREE_REG(promise);
3571 FREE_REG(return_value);
3572 __ bind(return_promise);
3573}
3574
3575void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3576 RegisterAllocator& regs,
3577 Label* return_promise) {
3578 regs.ResetExcept();
3579 static const Builtin_RejectPromise_InterfaceDescriptor desc;
3580 DEFINE_PINNED(promise, desc.GetRegisterParameter(0));
3581 DEFINE_PINNED(reason, desc.GetRegisterParameter(1));
3582 DEFINE_PINNED(debug_event, desc.GetRegisterParameter(2));
3583 int catch_handler = __ pc_offset();
3584
3585 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3586 thread_in_wasm_flag_addr = r2;
3587
3588 // Unset thread_in_wasm_flag.
3589 __ ldr(
3590 thread_in_wasm_flag_addr,
3591 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3592 __ Zero(MemOperand(thread_in_wasm_flag_addr, 0));
3593
3594 // The exception becomes the parameter of the RejectPromise builtin, and the
3595 // promise is the return value of this wrapper.
3596 __ Move(reason, kReturnRegister0);
3597 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3598 __ LoadTaggedField(
3599 promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
3600
3601 DEFINE_SCOPED(tmp);
3602 DEFINE_SCOPED(tmp2);
3603 DEFINE_SCOPED(tmp3);
3604 __ ldr(kContextRegister,
3605 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3606 GetContextFromImplicitArg(masm, kContextRegister, tmp);
3607 ReloadParentStack(masm, promise, reason, kContextRegister, tmp, tmp2, tmp3);
3608 RestoreParentSuspender(masm, tmp);
3609
3610 __ Move(tmp, Operand(1));
3611 __ str(tmp,
3612 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3613 __ Push(promise);
3614 __ LoadRoot(debug_event, RootIndex::kTrueValue);
3615 __ CallBuiltin(Builtin::kRejectPromise);
3616 __ Pop(promise);
3617
3618 // Run the rest of the wrapper normally (deconstruct the frame, ...).
3619 __ jmp(return_promise);
3620
3621 masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
3622}
3623
3624void JSToWasmWrapperHelper(MacroAssembler* masm, wasm::Promise mode) {
3625 bool stack_switch = mode == wasm::kPromise || mode == wasm::kStressSwitch;
3626 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3627
3628 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3629 : StackFrame::JS_TO_WASM);
3630
3631 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3632 kSystemPointerSize);
3633
3634 // Load the implicit argument (instance data or import data) from the frame.
3635 DEFINE_PINNED(implicit_arg, kWasmImplicitArgRegister);
3636 __ ldr(implicit_arg,
3637 MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3638
3639 DEFINE_PINNED(wrapper_buffer,
3640 WasmJSToWasmWrapperDescriptor::WrapperBufferRegister());
3641
3642 Label suspend;
3643 Register original_fp = no_reg;
3644 Register new_wrapper_buffer = no_reg;
3645 if (stack_switch) {
3646 SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
3647 original_fp, new_wrapper_buffer, &suspend);
3648 } else {
3649 original_fp = fp;
3650 new_wrapper_buffer = wrapper_buffer;
3651 }
3652
3653 regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
3654 new_wrapper_buffer);
3655
3656 {
3657 __ str(new_wrapper_buffer,
3658 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3659 if (stack_switch) {
3660 __ str(implicit_arg,
3661 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3662 DEFINE_SCOPED(scratch)
3663 __ ldr(
3664 scratch,
3665 MemOperand(original_fp,
3666 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3667 __ str(scratch,
3668 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3669 }
3670 }
3671 {
3672 DEFINE_SCOPED(result_size);
3673 __ ldr(result_size,
3674 MemOperand(wrapper_buffer, JSToWasmWrapperFrameConstants::
3675 kWrapperBufferStackReturnBufferSize));
3676 __ sub(sp, sp, Operand(result_size, LSL, kSystemPointerSizeLog2));
3677 }
3678
3679 __ str(
3680 sp,
3681 MemOperand(
3682 new_wrapper_buffer,
3683 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart));
3684
3685 if (stack_switch) {
3686 FREE_REG(new_wrapper_buffer)
3687 }
3688 FREE_REG(implicit_arg)
3689 for (auto reg : wasm::kGpParamRegisters) {
3690 regs.Reserve(reg);
3691 }
3692
3693 // The first GP parameter holds the trusted instance data or the import data.
3694 // This is handled specially.
3695 int stack_params_offset =
3696 (arraysize(wasm::kGpParamRegisters) - 1) * kSystemPointerSize +
3697 arraysize(wasm::kFpParamRegisters) * kDoubleSize;
3698 int param_padding = stack_params_offset & kSystemPointerSize;
3699 stack_params_offset += param_padding;
3700
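// Editor's note (illustrative, not part of builtins-arm.cc): kSystemPointerSize
// is 4 on this target, so `stack_params_offset & kSystemPointerSize` is either
// 0 or 4; the extra word keeps the FP (double) parameter slots loaded below
// 8-byte aligned. For example, three remaining GP params give an offset of 12,
// a padding of 4, and the doubles then start at offset 16.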
3701 {
3702 DEFINE_SCOPED(params_start);
3703 __ ldr(params_start,
3704 MemOperand(wrapper_buffer,
3705 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3706 {
3707 // Push stack parameters on the stack.
3708 DEFINE_SCOPED(params_end);
3709 __ ldr(params_end,
3710 MemOperand(wrapper_buffer,
3711 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3712 DEFINE_SCOPED(last_stack_param);
3713
3714 __ add(last_stack_param, params_start, Operand(stack_params_offset));
3715 Label loop_start;
3716 __ bind(&loop_start);
3717
3718 Label finish_stack_params;
3719 __ cmp(last_stack_param, params_end);
3720 __ b(ge, &finish_stack_params);
3721
3722 // Push parameter
3723 {
3724 DEFINE_SCOPED(scratch);
3725 __ ldr(scratch, MemOperand(params_end, -kSystemPointerSize, PreIndex));
3726 __ push(scratch);
3727 }
3728 __ jmp(&loop_start);
3729
3730 __ bind(&finish_stack_params);
3731 }
3732
3733 size_t next_offset = 0;
3734 for (size_t i = 1; i < arraysize(wasm::kGpParamRegisters); i++) {
3735 // Check that {params_start} does not overlap with any of the parameter
3736 // registers, so that we don't overwrite it by accident with the loads
3737 // below.
3738 DCHECK_NE(params_start, wasm::kGpParamRegisters[i]);
3739 __ ldr(wasm::kGpParamRegisters[i], MemOperand(params_start, next_offset));
3740 next_offset += kSystemPointerSize;
3741 }
3742
3743 next_offset += param_padding;
3744 for (size_t i = 0; i < arraysize(wasm::kFpParamRegisters); i++) {
3745 __ vldr(wasm::kFpParamRegisters[i],
3746 MemOperand(params_start, next_offset));
3747 next_offset += kDoubleSize;
3748 }
3749 DCHECK_EQ(next_offset, stack_params_offset);
3750 }
3751
3752 {
3753 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3754 __ ldr(thread_in_wasm_flag_addr,
3755 MemOperand(kRootRegister,
3756 Isolate::thread_in_wasm_flag_address_offset()));
3757 DEFINE_SCOPED(scratch);
3758 __ Move(scratch, Operand(1));
3759 __ str(scratch, MemOperand(thread_in_wasm_flag_addr, 0));
3760 }
3761
3762 __ Zero(MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3763 {
3764 DEFINE_SCOPED(call_target);
3765 __ ldr(call_target,
3766 MemOperand(wrapper_buffer,
3767 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3768 __ CallWasmCodePointer(call_target);
3769 }
3770
3771 regs.ResetExcept();
3772 // The wrapper_buffer has to be in r2 as the correct parameter register.
3773 regs.Reserve(kReturnRegister0, kReturnRegister1);
3774 ASSIGN_PINNED(wrapper_buffer, r2);
3775 {
3776 DEFINE_SCOPED(thread_in_wasm_flag_addr);
3777 __ ldr(thread_in_wasm_flag_addr,
3778 MemOperand(kRootRegister,
3779 Isolate::thread_in_wasm_flag_address_offset()));
3780 __ Zero(MemOperand(thread_in_wasm_flag_addr, 0));
3781 }
3782
3783 __ ldr(wrapper_buffer,
3784 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3785
3786 __ vstr(wasm::kFpReturnRegisters[0],
3787 MemOperand(
3788 wrapper_buffer,
3789 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1));
3790 __ vstr(wasm::kFpReturnRegisters[1],
3791 MemOperand(
3792 wrapper_buffer,
3793 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2));
3794 __ str(wasm::kGpReturnRegisters[0],
3795 MemOperand(
3796 wrapper_buffer,
3797 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1));
3798 __ str(wasm::kGpReturnRegisters[1],
3799 MemOperand(
3800 wrapper_buffer,
3801 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2));
3802 // Call the return value builtin with
3803 // r0: wasm instance.
3804 // r1: the result JSArray for multi-return.
3805 // r2: pointer to the byte buffer which contains all parameters.
3806 if (stack_switch) {
3807 __ ldr(r1, MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3808 __ ldr(r0, MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3809 } else {
3810 __ ldr(r1, MemOperand(
3811 fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3812 __ ldr(r0,
3813 MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3814 }
3815 Register scratch = r3;
3816 GetContextFromImplicitArg(masm, r0, scratch);
3817
3818 __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
3819
3820 Label return_promise;
3821 if (stack_switch) {
3822 SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);
3823 }
3824 __ bind(&suspend);
3825
3826 __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
3827 : StackFrame::JS_TO_WASM);
3828 // Despite returning to a different location for the regular and the
3829 // stack-switching versions, the incoming argument count matches in both
3830 // cases: instance and result array without suspend, or promise
3831 // resolve/reject params for the callback.
3832 __ add(sp, sp, Operand(2 * kSystemPointerSize));
3833 __ Jump(lr);
3834
3835 // Catch handler for the stack-switching wrapper: reject the promise with the
3836 // thrown exception.
3837 if (mode == wasm::kPromise) {
3838 // Block literal pool emission whilst taking the position of the handler
3839 // entry.
3840 Assembler::BlockConstPoolScope block_const_pool(masm);
3841 GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
3842 }
3843 // Emit constant pool now.
3844 __ CheckConstPool(true, false);
3845}
3846} // namespace
3847
3848void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
3849 JSToWasmWrapperHelper(masm, wasm::kNoPromise);
3850}
3851
3852void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
3853 JSToWasmWrapperHelper(masm, wasm::kPromise);
3854}
3855
3856void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
3857 JSToWasmWrapperHelper(masm, wasm::kStressSwitch);
3858}
3859
3860namespace {
3861
3862static constexpr Register kOldSPRegister = r7;
3863static constexpr Register kSwitchFlagRegister = r8;
3864
3865void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
3866 Register target_input,
3867 Register argv_input) {
3868 using ER = ExternalReference;
3869
3870 __ Move(kOldSPRegister, sp);
3871
3872 // Using r2 & r3 as temporary registers, because they will be rewritten
3873 // before exiting to native code anyway.
3874
3875 ER on_central_stack_flag_loc = ER::Create(
3876 IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
3877 __ Move(kSwitchFlagRegister, on_central_stack_flag_loc);
3878 __ ldrb(kSwitchFlagRegister, MemOperand(kSwitchFlagRegister));
3879
3880 Label do_not_need_to_switch;
3881 __ cmp(kSwitchFlagRegister, Operand(0));
3882 __ b(ne, &do_not_need_to_switch);
3883
3884 // Switch to central stack.
3885
3886 Register central_stack_sp = r2;
3887 DCHECK(!AreAliased(central_stack_sp, argc_input, argv_input, target_input));
3888 {
3889 __ Push(argc_input);
3890 __ Push(target_input);
3891 __ Push(argv_input);
3892 __ PrepareCallCFunction(2);
3893 __ Move(kCArgRegs[0], ER::isolate_address());
3894 __ Move(kCArgRegs[1], kOldSPRegister);
3895 __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
3896 SetIsolateDataSlots::kNo);
3897 __ Move(central_stack_sp, kReturnRegister0);
3898 __ Pop(argv_input);
3899 __ Pop(target_input);
3900 __ Pop(argc_input);
3901 }
3902
3903 static constexpr int kReturnAddressSlotOffset = 1 * kSystemPointerSize;
3904 static constexpr int kPadding = 1 * kSystemPointerSize;
3905 __ sub(sp, central_stack_sp, Operand(kReturnAddressSlotOffset + kPadding));
3906 __ EnforceStackAlignment();
3907
3908 // Update the sp saved in the frame.
3909 // It will be used to calculate the callee pc during GC.
3910 // The pc is going to be on the new stack segment, so rewrite it here.
3911 __ add(central_stack_sp, sp, Operand(kSystemPointerSize));
3912 __ str(central_stack_sp, MemOperand(fp, ExitFrameConstants::kSPOffset));
3913
3914 __ bind(&do_not_need_to_switch);
3915}
3916
3917void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
3918 using ER = ExternalReference;
3919
3920 Label no_stack_change;
3921
3922 __ cmp(kSwitchFlagRegister, Operand(0));
3923 __ b(ne, &no_stack_change);
3924
3925 {
3927 __ PrepareCallCFunction(1);
3928 __ Move(kCArgRegs[0], ER::isolate_address());
3929 __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
3930 SetIsolateDataSlots::kNo);
3932 }
3933
3934 __ Move(sp, kOldSPRegister);
3935
3936 __ bind(&no_stack_change);
3937}
3938
3939} // namespace
3940
3941#endif // V8_ENABLE_WEBASSEMBLY
3942
3943void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
3944 ArgvMode argv_mode, bool builtin_exit_frame,
3945 bool switch_to_central_stack) {
3946 // Called from JavaScript; parameters are on stack as if calling JS function.
3947 // r0: number of arguments including receiver
3948 // r1: pointer to C++ function
3949 // fp: frame pointer (restored after C call)
3950 // sp: stack pointer (restored as callee's sp after C call)
3951 // cp: current context (C callee-saved)
3952
3953 // If argv_mode == ArgvMode::kRegister:
3954 // r2: pointer to the first argument
3955
3956 using ER = ExternalReference;
3957
3958 // Move input arguments to more convenient registers.
3959 static constexpr Register argc_input = r0;
3960 static constexpr Register target_fun = r5; // C callee-saved
3961 static constexpr Register argv = r1;
3962 static constexpr Register scratch = r3;
3963 static constexpr Register argc_sav = r4; // C callee-saved
3964
3965 __ mov(target_fun, Operand(r1));
3966
3967 if (argv_mode == ArgvMode::kRegister) {
3968 // Move argv into the correct register.
3969 __ mov(argv, Operand(r2));
3970 } else {
3971 // Compute the argv pointer in a callee-saved register.
3972 __ add(argv, sp, Operand(argc_input, LSL, kPointerSizeLog2));
3973 __ sub(argv, argv, Operand(kPointerSize));
3974 }
3975
3976 // Enter the exit frame that transitions from JavaScript to C++.
3977 FrameScope scope(masm, StackFrame::MANUAL);
3978 __ EnterExitFrame(
3979 scratch, 0,
3980 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
3981
3982 // Store a copy of argc in callee-saved registers for later.
3983 __ mov(argc_sav, Operand(argc_input));
3984
3985 // r0: number of arguments including receiver
3986 // r4: number of arguments including receiver (C callee-saved)
3987 // r1: pointer to the first argument
3988 // r5: pointer to builtin function (C callee-saved)
3989
3990#if V8_HOST_ARCH_ARM
3991 int frame_alignment = MacroAssembler::ActivationFrameAlignment();
3992 int frame_alignment_mask = frame_alignment - 1;
3993 if (v8_flags.debug_code) {
3994 if (frame_alignment > kPointerSize) {
3995 Label alignment_as_expected;
3996 DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
3997 __ tst(sp, Operand(frame_alignment_mask));
3998 __ b(eq, &alignment_as_expected);
3999 // Don't use Check here, as it will call Runtime_Abort re-entering here.
4000 __ stop();
4001 __ bind(&alignment_as_expected);
4002 }
4003 }
4004#endif
4005
4006#if V8_ENABLE_WEBASSEMBLY
4007 if (switch_to_central_stack) {
4008 SwitchToTheCentralStackIfNeeded(masm, argc_input, target_fun, argv);
4009 }
4010#endif // V8_ENABLE_WEBASSEMBLY
4011
4012 // Call C built-in.
4013 // r0 = argc, r1 = argv, r2 = isolate, r5 = target_fun
4014 DCHECK_EQ(kCArgRegs[0], argc_input);
4015 DCHECK_EQ(kCArgRegs[1], argv);
4016 __ Move(kCArgRegs[2], ER::isolate_address());
4017 __ StoreReturnAddressAndCall(target_fun);
4018
4019 // Result returned in r0 or r1:r0 - do not destroy these registers!
4020
4021 // Check result for exception sentinel.
4022 Label exception_returned;
4023 __ CompareRoot(r0, RootIndex::kException);
4024 __ b(eq, &exception_returned);
4025
4026#if V8_ENABLE_WEBASSEMBLY
4027 if (switch_to_central_stack) {
4028 SwitchFromTheCentralStackIfNeeded(masm);
4029 }
4030#endif // V8_ENABLE_WEBASSEMBLY
4031
4032 // Check that there is no exception, otherwise we
4033 // should have returned the exception sentinel.
4034 if (v8_flags.debug_code) {
4035 Label okay;
4036 ER exception_address =
4037 ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4038 __ ldr(scratch, __ ExternalReferenceAsOperand(exception_address, no_reg));
4039 __ CompareRoot(scratch, RootIndex::kTheHoleValue);
4040 // Cannot use check here as it attempts to generate call into runtime.
4041 __ b(eq, &okay);
4042 __ stop();
4043 __ bind(&okay);
4044 }
4045
4046 // Exit C frame and return.
4047 // r0:r1: result
4048 // sp: stack pointer
4049 // fp: frame pointer
4050 // r4: still holds argc (C caller-saved).
4051 __ LeaveExitFrame(scratch);
4052 if (argv_mode == ArgvMode::kStack) {
4053 DCHECK(!AreAliased(scratch, argc_sav));
4054 __ add(sp, sp, Operand(argc_sav, LSL, kPointerSizeLog2));
4055 }
4056
4057 __ mov(pc, lr);
4058
4059 // Handling of exception.
4060 __ bind(&exception_returned);
4061
4062 ER pending_handler_context_address = ER::Create(
4063 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4064 ER pending_handler_entrypoint_address = ER::Create(
4065 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4066 ER pending_handler_fp_address =
4067 ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4068 ER pending_handler_sp_address =
4069 ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4070
4071 // Ask the runtime for help to determine the handler. This will set r0 to
4072 // contain the current exception, don't clobber it.
4073 {
4074 FrameScope scope(masm, StackFrame::MANUAL);
4075 __ PrepareCallCFunction(3, 0);
4076 __ mov(kCArgRegs[0], Operand(0));
4077 __ mov(kCArgRegs[1], Operand(0));
4078 __ Move(kCArgRegs[2], ER::isolate_address());
4079 __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3,
4080 SetIsolateDataSlots::kNo);
4081 }
4082
4083 // Retrieve the handler context, SP and FP.
4084 __ Move(cp, pending_handler_context_address);
4085 __ ldr(cp, MemOperand(cp));
4086 __ Move(sp, pending_handler_sp_address);
4087 __ ldr(sp, MemOperand(sp));
4088 __ Move(fp, pending_handler_fp_address);
4089 __ ldr(fp, MemOperand(fp));
4090
4091 // If the handler is a JS frame, restore the context to the frame. Note that
4092 // the context will be set to (cp == 0) for non-JS frames.
4093 __ cmp(cp, Operand(0));
4095
4096 // Clear c_entry_fp, like we do in `LeaveExitFrame`.
4097 ER c_entry_fp_address =
4098 ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4099 __ mov(scratch, Operand::Zero());
4100 __ str(scratch, __ ExternalReferenceAsOperand(c_entry_fp_address, no_reg));
4101
4102 // Compute the handler entry address and jump to it.
4103 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
4104 __ ldr(scratch, __ ExternalReferenceAsOperand(
4105 pending_handler_entrypoint_address, no_reg));
4106 __ Jump(scratch);
4107}
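// Editor's note (illustrative sketch, not part of builtins-arm.cc): the
// register assignment documented above (r0 = argc, r1 = argv, r2 = isolate,
// result in r0 or r1:r0) corresponds to a C entry point of roughly the shape
// below. The type aliases are assumptions made for this sketch, not V8's
// actual declarations.
#include <cstdint>
using Address = uintptr_t;  // one machine word: a raw address or tagged value
class Isolate;              // opaque here
// StoreReturnAddressAndCall(target_fun) ends up calling a function like this:
using CEntryTarget = Address (*)(int argc, Address* argv, Isolate* isolate);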
4108
4109#if V8_ENABLE_WEBASSEMBLY
4110void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4111 using ER = ExternalReference;
4112 Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
4113 Register gap = WasmHandleStackOverflowDescriptor::GapRegister();
4114 {
4115 DCHECK_NE(kCArgRegs[1], frame_base);
4116 DCHECK_NE(kCArgRegs[3], frame_base);
4117 __ mov(kCArgRegs[3], gap);
4118 __ mov(kCArgRegs[1], sp);
4119 __ sub(kCArgRegs[2], frame_base, kCArgRegs[1]);
4120 // On Arm we need to preserve the fp value somewhere before entering the
4121 // INTERNAL frame later. It will be placed on the stack as an argument.
4122 __ mov(kCArgRegs[0], fp);
4123 FrameScope scope(masm, StackFrame::INTERNAL);
4124 __ push(kCArgRegs[3]);
4125 __ PrepareCallCFunction(5);
4126 __ str(kCArgRegs[0], MemOperand(sp, 0 * kPointerSize)); // current_fp.
4127 __ Move(kCArgRegs[0], ER::isolate_address());
4128 __ CallCFunction(ER::wasm_grow_stack(), 5);
4129 __ pop(gap);
4131 }
4132 Label call_runtime;
4133 // wasm_grow_stack returns zero if it cannot grow a stack.
4134 __ cmp(kReturnRegister0, Operand(0));
4135 __ b(eq, &call_runtime);
4136
4137 // Calculate old FP - SP offset to adjust FP accordingly to new SP.
4138 __ sub(fp, fp, sp);
4139 __ add(fp, fp, kReturnRegister0);
4140 __ mov(sp, kReturnRegister0);
4141 {
4142 UseScratchRegisterScope temps(masm);
4143 Register scratch = temps.Acquire();
4144 __ mov(scratch,
4145 Operand(StackFrame::TypeToMarker(StackFrame::WASM_SEGMENT_START)));
4146 __ str(scratch, MemOperand(fp, TypedFrameConstants::kFrameTypeOffset));
4147 }
4148 __ Ret();
4149
4150 __ bind(&call_runtime);
4151 // If wasm_grow_stack returned zero, the interruption or stack overflow
4152 // should be handled by a runtime call.
4153 {
4155 MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
4156 __ LoadTaggedField(
4158 WasmTrustedInstanceData::kNativeContextOffset));
4159 FrameScope scope(masm, StackFrame::MANUAL);
4160 __ EnterFrame(StackFrame::INTERNAL);
4161 __ SmiTag(gap);
4162 __ push(gap);
4163 __ CallRuntime(Runtime::kWasmStackGuard);
4164 __ LeaveFrame(StackFrame::INTERNAL);
4165 __ Ret();
4166 }
4167}
4168#endif // V8_ENABLE_WEBASSEMBLY
4169
4170void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
4171 Label negate, done;
4172
4173 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
4174 UseScratchRegisterScope temps(masm);
4175 Register result_reg = r7;
4176 Register double_low = GetRegisterThatIsNotOneOf(result_reg);
4177 Register double_high = GetRegisterThatIsNotOneOf(result_reg, double_low);
4178 LowDwVfpRegister double_scratch = temps.AcquireLowD();
4179
4180 // Save the old values from these temporary registers on the stack.
4181 __ Push(result_reg, double_high, double_low);
4182
4183 // Account for saved regs.
4184 const int kArgumentOffset = 3 * kPointerSize;
4185
4186 MemOperand input_operand(sp, kArgumentOffset);
4187 MemOperand result_operand = input_operand;
4188
4189 // Load double input.
4190 __ vldr(double_scratch, input_operand);
4191 __ vmov(double_low, double_high, double_scratch);
4192 // Try to convert with a FPU convert instruction. This handles all
4193 // non-saturating cases.
4194 __ TryInlineTruncateDoubleToI(result_reg, double_scratch, &done);
4195
4196 Register scratch = temps.Acquire();
4197 __ Ubfx(scratch, double_high, HeapNumber::kExponentShift,
4198 HeapNumber::kExponentBits);
4199 // Load scratch with exponent - 1. This is faster than loading
4200 // with exponent because Bias + 1 = 1024 which is an *ARM* immediate value.
4201 static_assert(HeapNumber::kExponentBias + 1 == 1024);
4202 __ sub(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
4203 // If the exponent is greater than or equal to 84, the 32 least significant
4204 // bits are all zero (the lowest set bit sits at position exponent - 52 >= 32,
4205 // since the mantissa has 52 bits), so the result is 0.
4206 // Compare exponent with 84 (compare exponent - 1 with 83). If the exponent is
4207 // greater than this, the conversion is out of range, so return zero.
4208 __ cmp(scratch, Operand(83));
4209 __ mov(result_reg, Operand::Zero(), LeaveCC, ge);
4210 __ b(ge, &done);
4211
4212 // If we reach this code, 30 <= exponent <= 83.
4213 // `TryInlineTruncateDoubleToI` above will have truncated any double with an
4214 // exponent lower than 30.
4215 if (v8_flags.debug_code) {
4216 // Scratch is exponent - 1.
4217 __ cmp(scratch, Operand(30 - 1));
4218 __ Check(ge, AbortReason::kUnexpectedValue);
4219 }
4220
4221 // We don't have to handle cases where 0 <= exponent <= 20 for which we would
4222 // need to shift right the high part of the mantissa.
4223 // Scratch contains exponent - 1.
4224 // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
4225 __ rsb(scratch, scratch, Operand(51), SetCC);
4226
4227 // 52 <= exponent <= 83, shift only double_low.
4228 // On entry, scratch contains: 52 - exponent.
4229 __ rsb(scratch, scratch, Operand::Zero(), LeaveCC, ls);
4230 __ mov(result_reg, Operand(double_low, LSL, scratch), LeaveCC, ls);
4231 __ b(ls, &negate);
4232
4233 // 21 <= exponent <= 51, shift double_low and double_high
4234 // to generate the result.
4235 __ mov(double_low, Operand(double_low, LSR, scratch));
4236 // Scratch contains: 52 - exponent.
4237 // We need: exponent - 20.
4238 // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
4239 __ rsb(scratch, scratch, Operand(32));
4240 __ Ubfx(result_reg, double_high, 0, HeapNumber::kMantissaBitsInTopWord);
4241 // Set the implicit 1 before the mantissa part in double_high.
4242 __ orr(result_reg, result_reg,
4243 Operand(1 << HeapNumber::kMantissaBitsInTopWord));
4244 __ orr(result_reg, double_low, Operand(result_reg, LSL, scratch));
4245
4246 __ bind(&negate);
4247 // If input was positive, double_high ASR 31 equals 0 and
4248 // double_high LSR 31 equals zero.
4249 // New result = (result eor 0) + 0 = result.
4250 // If the input was negative, we have to negate the result.
4251 // double_high ASR 31 equals 0xFFFFFFFF and double_high LSR 31 equals 1.
4252 // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
4253 __ eor(result_reg, result_reg, Operand(double_high, ASR, 31));
4254 __ add(result_reg, result_reg, Operand(double_high, LSR, 31));
4255
4256 __ bind(&done);
4257 __ str(result_reg, result_operand);
4258
4259 // Restore registers corrupted in this routine and return.
4260 __ Pop(result_reg, double_high, double_low);
4261 __ Ret();
4262}
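// Editor's note (illustrative sketch, not part of builtins-arm.cc): the manual
// path above performs a "modulo 2^32" truncation by shifting the mantissa
// according to the exponent and applying the sign with the final eor/add pair.
// A hedged plain-C++ rendering of the same arithmetic follows; names and
// constants are ours, and the small-exponent branch is included only for
// completeness (the builtin lets the inline vcvt fast path handle it).
#include <cstdint>
#include <cstring>

int32_t TruncateDoubleToInt32(double input) {
  uint64_t bits;
  std::memcpy(&bits, &input, sizeof bits);
  uint32_t double_high = static_cast<uint32_t>(bits >> 32);  // sign, exponent, mantissa top
  uint32_t double_low = static_cast<uint32_t>(bits);         // mantissa bottom 32 bits
  int exponent = static_cast<int>((double_high >> 20) & 0x7FF) - 1023;

  uint32_t result;
  if (exponent < 0 || exponent > 83) {
    // |input| < 1, or every set bit lies above bit 31 (also NaN/Infinity): 0.
    result = 0;
  } else {
    uint32_t mantissa_high = (double_high & 0xFFFFF) | (1u << 20);  // implicit 1
    if (exponent >= 52) {
      result = double_low << (exponent - 52);
    } else if (exponent > 20) {
      result = (double_low >> (52 - exponent)) | (mantissa_high << (exponent - 20));
    } else {
      result = mantissa_high >> (20 - exponent);  // handled inline by vcvt in the builtin
    }
  }
  // Same sign fix-up as the eor/add above: negate iff the input was negative.
  uint32_t sign_mask = (double_high & 0x80000000u) ? 0xFFFFFFFFu : 0u;
  uint32_t sign_bit = double_high >> 31;
  return static_cast<int32_t>((result ^ sign_mask) + sign_bit);
}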
4263
4264void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm,
4265 CallApiCallbackMode mode) {
4266 // ----------- S t a t e -------------
4267 // CallApiCallbackMode::kOptimizedNoProfiling/kOptimized modes:
4268 // -- r1 : api function address
4269 // Both modes:
4270 // -- r2 : arguments count (not including the receiver)
4271 // -- r3 : FunctionTemplateInfo
4272 // -- cp : context
4273 // -- sp[0] : receiver
4274 // -- sp[8] : first argument
4275 // -- ...
4276 // -- sp[(argc) * 8] : last argument
4277 // -----------------------------------
4278
4279 Register function_callback_info_arg = kCArgRegs[0];
4280
4281 Register api_function_address = no_reg;
4282 Register argc = no_reg;
4283 Register func_templ = no_reg;
4284 Register topmost_script_having_context = no_reg;
4285 Register scratch = r4;
4286
4287 switch (mode) {
4288 case CallApiCallbackMode::kGeneric:
4289 argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4290 topmost_script_having_context = CallApiCallbackGenericDescriptor::
4291 TopmostScriptHavingContextRegister();
4292 func_templ =
4293 CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister();
4294 break;
4295
4296 case CallApiCallbackMode::kOptimizedNoProfiling:
4297 case CallApiCallbackMode::kOptimized:
4298 // Caller context is always equal to current context because we don't
4299 // inline Api calls cross-context.
4300 topmost_script_having_context = kContextRegister;
4301 api_function_address =
4302 CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4303 argc = CallApiCallbackOptimizedDescriptor::ActualArgumentsCountRegister();
4304 func_templ =
4305 CallApiCallbackOptimizedDescriptor::FunctionTemplateInfoRegister();
4306 break;
4307 }
4308 DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
4309 func_templ, scratch));
4310
4311 using FCA = FunctionCallbackArguments;
4312 using ER = ExternalReference;
4313 using FC = ApiCallbackExitFrameConstants;
4314
4315 static_assert(FCA::kArgsLength == 6);
4316 static_assert(FCA::kNewTargetIndex == 5);
4317 static_assert(FCA::kTargetIndex == 4);
4318 static_assert(FCA::kReturnValueIndex == 3);
4319 static_assert(FCA::kContextIndex == 2);
4320 static_assert(FCA::kIsolateIndex == 1);
4321 static_assert(FCA::kUnusedIndex == 0);
4322
4323 // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
4324 // Target state:
4325 // sp[1 * kSystemPointerSize]: kUnused <= FCA::implicit_args_
4326 // sp[2 * kSystemPointerSize]: kIsolate
4327 // sp[3 * kSystemPointerSize]: kContext
4328 // sp[4 * kSystemPointerSize]: undefined (kReturnValue)
4329 // sp[5 * kSystemPointerSize]: kTarget
4330 // sp[6 * kSystemPointerSize]: undefined (kNewTarget)
4331 // Existing state:
4332 // sp[7 * kSystemPointerSize]: <= FCA:::values_
4333
4334 __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4335 topmost_script_having_context);
4336
4337 if (mode == CallApiCallbackMode::kGeneric) {
4338 api_function_address = ReassignRegister(topmost_script_having_context);
4339 }
4340
4341 // Reserve space on the stack.
4342 __ AllocateStackSpace(FCA::kArgsLength * kSystemPointerSize);
4343
4344 // kIsolate.
4345 __ Move(scratch, ER::isolate_address());
4346 __ str(scratch, MemOperand(sp, FCA::kIsolateIndex * kSystemPointerSize));
4347
4348 // kContext.
4349 __ str(cp, MemOperand(sp, FCA::kContextIndex * kSystemPointerSize));
4350
4351 // kReturnValue.
4352 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4353 __ str(scratch, MemOperand(sp, FCA::kReturnValueIndex * kSystemPointerSize));
4354
4355 // kTarget.
4356 __ str(func_templ, MemOperand(sp, FCA::kTargetIndex * kSystemPointerSize));
4357
4358 // kNewTarget.
4359 __ str(scratch, MemOperand(sp, FCA::kNewTargetIndex * kSystemPointerSize));
4360
4361 // kUnused.
4362 __ str(scratch, MemOperand(sp, FCA::kUnusedIndex * kSystemPointerSize));
4363
4364 FrameScope frame_scope(masm, StackFrame::MANUAL);
4365 if (mode == CallApiCallbackMode::kGeneric) {
4366 __ ldr(
4367 api_function_address,
4368 FieldMemOperand(func_templ,
4369 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset));
4370 }
4371 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4372 StackFrame::API_CALLBACK_EXIT);
4373
4374 MemOperand argc_operand = MemOperand(fp, FC::kFCIArgcOffset);
4375 {
4376 ASM_CODE_COMMENT_STRING(masm, "Initialize v8::FunctionCallbackInfo");
4377 // FunctionCallbackInfo::length_.
4378 // TODO(ishell): pass JSParameterCount(argc) to simplify things on the
4379 // caller end.
4380 __ str(argc, argc_operand);
4381
4382 // FunctionCallbackInfo::implicit_args_.
4383 __ add(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
4384 __ str(scratch, MemOperand(fp, FC::kFCIImplicitArgsOffset));
4385
4386 // FunctionCallbackInfo::values_ (points at JS arguments on the stack).
4387 __ add(scratch, fp, Operand(FC::kFirstArgumentOffset));
4388 __ str(scratch, MemOperand(fp, FC::kFCIValuesOffset));
4389 }
4390
4391 __ RecordComment("v8::FunctionCallback's argument.");
4392 __ add(function_callback_info_arg, fp,
4393 Operand(FC::kFunctionCallbackInfoOffset));
4394
4395 DCHECK(!AreAliased(api_function_address, function_callback_info_arg));
4396
4397 ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4398 Register no_thunk_arg = no_reg;
4399
4400 MemOperand return_value_operand = MemOperand(fp, FC::kReturnValueOffset);
4401 static constexpr int kSlotsToDropOnReturn =
4402 FC::kFunctionCallbackInfoArgsLength + kJSArgcReceiverSlots;
4403
4404 const bool with_profiling =
4405 mode != CallApiCallbackMode::kOptimizedNoProfiling;
4406 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4407 thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4408 &argc_operand, return_value_operand);
4409}
4410
4411void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4412 // ----------- S t a t e -------------
4413 // -- cp : context
4414 // -- r1 : receiver
4415 // -- r3 : accessor info
4416 // -- r0 : holder
4417 // -----------------------------------
4418
4419 // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
4420 // name below the exit frame to make GC aware of them.
4421 using PCA = PropertyCallbackArguments;
4422 using ER = ExternalReference;
4423 using FC = ApiAccessorExitFrameConstants;
4424
4425 static_assert(PCA::kPropertyKeyIndex == 0);
4426 static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4427 static_assert(PCA::kHolderIndex == 2);
4428 static_assert(PCA::kIsolateIndex == 3);
4429 static_assert(PCA::kHolderV2Index == 4);
4430 static_assert(PCA::kReturnValueIndex == 5);
4431 static_assert(PCA::kDataIndex == 6);
4432 static_assert(PCA::kThisIndex == 7);
4433 static_assert(PCA::kArgsLength == 8);
4434
4435 // Set up v8::PropertyCallbackInfo's (PCI) args_ on the stack as follows:
4436 // Target state:
4437 // sp[0 * kSystemPointerSize]: name <= PCI::args_
4438 // sp[1 * kSystemPointerSize]: kShouldThrowOnErrorIndex
4439 // sp[2 * kSystemPointerSize]: kHolderIndex
4440 // sp[3 * kSystemPointerSize]: kIsolateIndex
4441 // sp[4 * kSystemPointerSize]: kHolderV2Index
4442 // sp[5 * kSystemPointerSize]: kReturnValueIndex
4443 // sp[6 * kSystemPointerSize]: kDataIndex
4444 // sp[7 * kSystemPointerSize]: kThisIndex / receiver
4445
4446 Register name_arg = kCArgRegs[0];
4447 Register property_callback_info_arg = kCArgRegs[1];
4448
4449 Register api_function_address = r2;
4450 Register receiver = ApiGetterDescriptor::ReceiverRegister();
4451 Register holder = ApiGetterDescriptor::HolderRegister();
4452 Register callback = ApiGetterDescriptor::CallbackRegister();
4453 Register scratch = r4;
4454 Register smi_zero = r5;
4455
4456 DCHECK(!AreAliased(receiver, holder, callback, scratch, smi_zero));
4457
4458 __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
4459 __ Push(receiver, scratch); // kThisIndex, kDataIndex
4460 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4461 __ Move(smi_zero, Smi::zero());
4462 __ Push(scratch, smi_zero); // kReturnValueIndex, kHolderV2Index
4463 __ Move(scratch, ER::isolate_address());
4464 __ Push(scratch, holder); // kIsolateIndex, kHolderIndex
4465
4466 __ ldr(name_arg, FieldMemOperand(callback, AccessorInfo::kNameOffset));
4467 static_assert(kDontThrow == 0);
4468 __ Push(smi_zero, name_arg); // should_throw_on_error -> kDontThrow, name
4469
4470 __ RecordComment("Load api_function_address");
4471 __ ldr(api_function_address,
4472 FieldMemOperand(callback, AccessorInfo::kMaybeRedirectedGetterOffset));
4473
4474 FrameScope frame_scope(masm, StackFrame::MANUAL);
4475 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4476 StackFrame::API_ACCESSOR_EXIT);
4477
4478 __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
4479 // property_callback_info_arg = v8::PropertyCallbackInfo&
4480 __ add(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));
4481
4482 DCHECK(!AreAliased(api_function_address, property_callback_info_arg, name_arg,
4483 callback, scratch));
4484
4485#ifdef V8_ENABLE_DIRECT_HANDLE
4486 // name_arg = Local<Name>(name), name value was pushed to GC-ed stack space.
4487 // |name_arg| is already initialized above.
4488#else
4489 // name_arg = Local<Name>(&name), which is &args_array[kPropertyKeyIndex].
4490 static_assert(PCA::kPropertyKeyIndex == 0);
4491 __ mov(name_arg, property_callback_info_arg);
4492#endif
4493
4494 ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
4495 // Pass AccessorInfo to thunk wrapper in case profiler or side-effect
4496 // checking is enabled.
4497 Register thunk_arg = callback;
4498
4499 MemOperand return_value_operand = MemOperand(fp, FC::kReturnValueOffset);
4500 static constexpr int kSlotsToDropOnReturn =
4501 FC::kPropertyCallbackInfoArgsLength;
4502 MemOperand* const kUseStackSpaceConstant = nullptr;
4503
4504 const bool with_profiling = true;
4505 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4506 thunk_ref, thunk_arg, kSlotsToDropOnReturn,
4507 kUseStackSpaceConstant, return_value_operand);
4508}
4509
4510void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
4511 // The sole purpose of DirectCEntry is for movable callers (e.g. any general
4512 // purpose InstructionStream object) to be able to call into C functions that
4513 // may trigger GC and thus move the caller.
4514 //
4515 // DirectCEntry places the return address on the stack (updated by the GC),
4516 // making the call GC safe. The irregexp backend relies on this.
4517
4518 __ str(lr, MemOperand(sp, 0)); // Store the return address.
4519 __ blx(ip); // Call the C++ function.
4520 __ ldr(pc, MemOperand(sp, 0)); // Return to calling code.
4521}
4522
4523void Builtins::Generate_MemCopyUint8Uint8(MacroAssembler* masm) {
4524 Register dest = r0;
4525 Register src = r1;
4526 Register chars = r2;
4527 Register temp1 = r3;
4528 Label less_4;
4529
4530 {
4531 UseScratchRegisterScope temps(masm);
4532 Register temp2 = temps.Acquire();
4533 Label loop;
4534
4535 __ bic(temp2, chars, Operand(0x3), SetCC);
4536 __ b(&less_4, eq);
4537 __ add(temp2, dest, temp2);
4538
4539 __ bind(&loop);
4540 __ ldr(temp1, MemOperand(src, 4, PostIndex));
4541 __ str(temp1, MemOperand(dest, 4, PostIndex));
4542 __ cmp(dest, temp2);
4543 __ b(&loop, ne);
4544 }
4545
4546 __ bind(&less_4);
4547 __ mov(chars, Operand(chars, LSL, 31), SetCC);
4548 // bit0 => Z (ne), bit1 => C (cs)
4549 __ ldrh(temp1, MemOperand(src, 2, PostIndex), cs);
4550 __ strh(temp1, MemOperand(dest, 2, PostIndex), cs);
4551 __ ldrb(temp1, MemOperand(src), ne);
4552 __ strb(temp1, MemOperand(dest), ne);
4553 __ Ret();
4554}
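// Editor's note (illustrative sketch, not part of builtins-arm.cc): the builtin
// above copies whole 32-bit words first, then uses the flags produced by
// `chars LSL #31` (bit 1 -> carry, bit 0 -> Z clear) to conditionally copy a
// 2-byte and a 1-byte tail. A standalone C++ version of the same strategy:
#include <cstdint>
#include <cstring>

void CopyUint8Uint8(uint8_t* dest, const uint8_t* src, size_t chars) {
  size_t words = chars & ~static_cast<size_t>(3);  // bic temp2, chars, #3
  for (size_t i = 0; i < words; i += 4) {          // ldr/str word loop
    uint32_t tmp;
    std::memcpy(&tmp, src + i, sizeof tmp);
    std::memcpy(dest + i, &tmp, sizeof tmp);
  }
  src += words;
  dest += words;
  if (chars & 2) {  // carry set after `chars LSL #31` -> ldrh/strh
    uint16_t tmp;
    std::memcpy(&tmp, src, sizeof tmp);
    std::memcpy(dest, &tmp, sizeof tmp);
    src += 2;
    dest += 2;
  }
  if (chars & 1) {  // Z clear (ne) after `chars LSL #31` -> ldrb/strb
    *dest = *src;
  }
}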
4555
4556namespace {
4557
4558// This code tries to be close to ia32 code so that any changes can be
4559// easily ported.
4560void Generate_DeoptimizationEntry(MacroAssembler* masm,
4561 DeoptimizeKind deopt_kind) {
4562 Isolate* isolate = masm->isolate();
4563
4564 // Note: This is an overapproximation; we always reserve space for 32 double
4565 // registers, even though the actual CPU may only support 16. In the latter
4566 // case, SaveFPRegs and RestoreFPRegs still use 32 stack slots, but only fill
4567 // 16.
4568 static constexpr int kDoubleRegsSize =
4569 kDoubleSize * DwVfpRegister::kNumRegisters;
4570
4571 // Save all allocatable VFP registers before messing with them.
4572 {
4573 UseScratchRegisterScope temps(masm);
4574 Register scratch = temps.Acquire();
4575 __ SaveFPRegs(sp, scratch);
4576 }
4577
4578 // Save all general purpose registers before messing with them.
4579 static constexpr int kNumberOfRegisters = Register::kNumRegisters;
4580 static_assert(kNumberOfRegisters == 16);
4581
4582 // Everything but pc, lr and ip which will be saved but not restored.
4583 RegList restored_regs = kJSCallerSaved | kCalleeSaved | RegList{ip};
4584
4585 // Push all 16 registers (needed to populate FrameDescription::registers_).
4586 // TODO(v8:1588): Note that using pc with stm is deprecated, so we should
4587 // perhaps handle this a bit differently.
4588 __ stm(db_w, sp, restored_regs | RegList{sp, lr, pc});
4589
4590 {
4591 UseScratchRegisterScope temps(masm);
4592 Register scratch = temps.Acquire();
4593 __ Move(scratch, ExternalReference::Create(
4594 IsolateAddressId::kCEntryFPAddress, isolate));
4595 __ str(fp, MemOperand(scratch));
4596 }
4597
4598 static constexpr int kSavedRegistersAreaSize =
4599 (kNumberOfRegisters * kPointerSize) + kDoubleRegsSize;
4600
4601 // Get the address of the location in the code object (r3) (return
4602 // address for lazy deoptimization) and compute the fp-to-sp delta in
4603 // register r4.
4604 __ mov(r2, lr);
4605 __ add(r3, sp, Operand(kSavedRegistersAreaSize));
4606 __ sub(r3, fp, r3);
4607
4608 // Allocate a new deoptimizer object.
4609 // Pass four arguments in r0 to r3 and fifth argument on stack.
4610 __ PrepareCallCFunction(5);
4611 __ mov(r0, Operand(0));
4612 Label context_check;
4614 __ JumpIfSmi(r1, &context_check);
4616 __ bind(&context_check);
4617 __ mov(r1, Operand(static_cast<int>(deopt_kind)));
4618 // r2: code address or 0 already loaded.
4619 // r3: Fp-to-sp delta already loaded.
4620 __ Move(r4, ExternalReference::isolate_address());
4621 __ str(r4, MemOperand(sp, 0 * kPointerSize)); // Isolate.
4622 // Call Deoptimizer::New().
4623 {
4624 AllowExternalCallThatCantCauseGC scope(masm);
4625 __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
4626 }
4627
4628 // Preserve "deoptimizer" object in register r0 and get the input
4629 // frame descriptor pointer to r1 (deoptimizer->input_);
4630 __ ldr(r1, MemOperand(r0, Deoptimizer::input_offset()));
4631
4632 // Copy core registers into FrameDescription::registers_.
4634 for (int i = 0; i < kNumberOfRegisters; i++) {
4635 int offset = (i * kPointerSize) + FrameDescription::registers_offset();
4636 __ ldr(r2, MemOperand(sp, i * kPointerSize));
4637 __ str(r2, MemOperand(r1, offset));
4638 }
4639
4640 // Copy simd128 / double registers to the FrameDescription.
4641 static constexpr int kSimd128RegsOffset =
4642 FrameDescription::simd128_registers_offset();
4643 {
4644 UseScratchRegisterScope temps(masm);
4645 Register scratch = temps.Acquire();
4646 Register src_location = r4;
4647 __ add(src_location, sp, Operand(kNumberOfRegisters * kPointerSize));
4648 __ RestoreFPRegs(src_location, scratch);
4649
4650 Register dst_location = r4;
4651 __ add(dst_location, r1, Operand(kSimd128RegsOffset));
4652 __ SaveFPRegsToHeap(dst_location, scratch);
4653 }
4654
4655 // Mark the stack as not iterable for the CPU profiler which won't be able to
4656 // walk the stack without the return address.
4657 {
4658 UseScratchRegisterScope temps(masm);
4659 Register is_iterable = temps.Acquire();
4660 Register zero = r4;
4661 __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
4662 __ mov(zero, Operand(0));
4663 __ strb(zero, MemOperand(is_iterable));
4664 }
4665
4666 // Remove the saved registers from the stack.
4667 __ add(sp, sp, Operand(kSavedRegistersAreaSize));
4668
4669 // Compute a pointer to the unwinding limit in register r2; that is
4670 // the first stack slot not part of the input frame.
4671 __ ldr(r2, MemOperand(r1, FrameDescription::frame_size_offset()));
4672 __ add(r2, r2, sp);
4673
4674 // Unwind the stack down to - but not including - the unwinding
4675 // limit and copy the contents of the activation frame to the input
4676 // frame description.
4677 __ add(r3, r1, Operand(FrameDescription::frame_content_offset()));
4678 Label pop_loop;
4679 Label pop_loop_header;
4680 __ b(&pop_loop_header);
4681 __ bind(&pop_loop);
4682 __ pop(r4);
4683 __ str(r4, MemOperand(r3, 0));
4684 __ add(r3, r3, Operand(sizeof(uint32_t)));
4685 __ bind(&pop_loop_header);
4686 __ cmp(r2, sp);
4687 __ b(ne, &pop_loop);
4688
4689 // Compute the output frame in the deoptimizer.
4690 __ push(r0); // Preserve deoptimizer object across call.
4691 // r0: deoptimizer object; r1: scratch.
4692 __ PrepareCallCFunction(1);
4693 // Call Deoptimizer::ComputeOutputFrames().
4694 {
4695 AllowExternalCallThatCantCauseGC scope(masm);
4696 __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
4697 }
4698 __ pop(r0); // Restore deoptimizer object (class Deoptimizer).
4699
4700 __ ldr(sp, MemOperand(r0, Deoptimizer::caller_frame_top_offset()));
4701
4702 // Replace the current (input) frame with the output frames.
4703 Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
4704 // Outer loop state: r4 = current "FrameDescription** output_",
4705 // r1 = one past the last FrameDescription**.
4706 __ ldr(r1, MemOperand(r0, Deoptimizer::output_count_offset()));
4707 __ ldr(r4, MemOperand(r0, Deoptimizer::output_offset())); // r4 is output_.
4708 __ add(r1, r4, Operand(r1, LSL, 2));
4709 __ jmp(&outer_loop_header);
4710 __ bind(&outer_push_loop);
4711 // Inner loop state: r2 = current FrameDescription*, r3 = loop index.
4712 __ ldr(r2, MemOperand(r4, 0)); // output_[ix]
4713 __ ldr(r3, MemOperand(r2, FrameDescription::frame_size_offset()));
4714 __ jmp(&inner_loop_header);
4715 __ bind(&inner_push_loop);
4716 __ sub(r3, r3, Operand(sizeof(uint32_t)));
4717 __ add(r6, r2, Operand(r3));
4718 __ ldr(r6, MemOperand(r6, FrameDescription::frame_content_offset()));
4719 __ push(r6);
4720 __ bind(&inner_loop_header);
4721 __ cmp(r3, Operand::Zero());
4722 __ b(ne, &inner_push_loop); // test for gt?
4723 __ add(r4, r4, Operand(kPointerSize));
4724 __ bind(&outer_loop_header);
4725 __ cmp(r4, r1);
4726 __ b(lt, &outer_push_loop);
4727
4728 __ ldr(r1, MemOperand(r0, Deoptimizer::input_offset()));
4729
4730 // State:
4731 // r1: Deoptimizer::input_ (FrameDescription*).
4732 // r2: The last output FrameDescription pointer (FrameDescription*).
4733
4734 // Restore double registers from the output frame description.
4735 {
4736 UseScratchRegisterScope temps(masm);
4737 Register scratch = temps.Acquire();
4738 Register src_location = r6;
4739 __ add(src_location, r2, Operand(kSimd128RegsOffset));
4740 __ RestoreFPRegsFromHeap(src_location, scratch);
4741 }
4742
4743 // Push pc and continuation from the last output frame.
4744 __ ldr(r6, MemOperand(r2, FrameDescription::pc_offset()));
4745 __ push(r6);
4746 __ ldr(r6, MemOperand(r2, FrameDescription::continuation_offset()));
4747 __ push(r6);
4748
4749 // Push the registers from the last output frame.
4750 for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
4751 int offset = (i * kPointerSize) + FrameDescription::registers_offset();
4752 __ ldr(r6, MemOperand(r2, offset));
4753 __ push(r6);
4754 }
4755
4756 // Restore the registers from the stack.
4757 __ ldm(ia_w, sp, restored_regs); // all but pc registers.
4758
4759 {
4760 UseScratchRegisterScope temps(masm);
4761 Register is_iterable = temps.Acquire();
4762 Register one = r4;
4763 __ push(one); // Save the value from the output FrameDescription.
4764 __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
4765 __ mov(one, Operand(1));
4766 __ strb(one, MemOperand(is_iterable));
4767 __ pop(one); // Restore the value from the output FrameDescription.
4768 }
4769
4770 // Remove sp, lr and pc.
4771 __ Drop(3);
4772 {
4773 UseScratchRegisterScope temps(masm);
4774 Register scratch = temps.Acquire();
4775 __ pop(scratch); // get continuation, leave pc on stack
4776 __ pop(lr);
4777 Label end;
4778 __ cmp(scratch, Operand::Zero());
4779 __ b(eq, &end);
4780 __ Jump(scratch);
4781 __ bind(&end);
4782 __ Ret();
4783 }
4784
4785 __ stop();
4786}
4787
4788} // namespace
4789
4790void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
4791 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
4792}
4793
4794void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
4795 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
4796}
4797
4798// If there is baseline code on the shared function info, converts an
4799// interpreter frame into a baseline frame and continues execution in baseline
4800// code. Otherwise execution continues with bytecode.
4801void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
4802 MacroAssembler* masm) {
4803 Label start;
4804 __ bind(&start);
4805
4806 // Get function from the frame.
4807 Register closure = r1;
4808 __ ldr(closure, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
4809
4810 // Get the InstructionStream object from the shared function info.
4811 Register code_obj = r4;
4812 __ ldr(code_obj,
4813 FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
4814
4815 ResetSharedFunctionInfoAge(masm, code_obj, r3);
4816
4817 __ ldr(code_obj,
4818 FieldMemOperand(code_obj,
4819 SharedFunctionInfo::kTrustedFunctionDataOffset));
4820
4821 // For OSR entry it is safe to assume we always have baseline code.
4822 if (v8_flags.debug_code) {
4823 __ CompareObjectType(code_obj, r3, r3, CODE_TYPE);
4824 __ Assert(eq, AbortReason::kExpectedBaselineData);
4825 AssertCodeIsBaseline(masm, code_obj, r3);
4826 }
4827
4828 // Load the feedback cell and vector.
4829 Register feedback_cell = r2;
4830 Register feedback_vector = r9;
4831 __ ldr(feedback_cell,
4832 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
4833 __ ldr(feedback_vector,
4834 FieldMemOperand(feedback_cell, FeedbackCell::kValueOffset));
4835
4836 Label install_baseline_code;
4837 // Check if feedback vector is valid. If not, call prepare for baseline to
4838 // allocate it.
4839 __ CompareObjectType(feedback_vector, r3, r3, FEEDBACK_VECTOR_TYPE);
4840 __ b(ne, &install_baseline_code);
4841
4842 // Save BytecodeOffset from the stack frame.
4846 // Replace bytecode offset with feedback cell.
4849 __ str(feedback_cell,
4851 feedback_cell = no_reg;
4852 // Update feedback vector cache.
4855 __ str(feedback_vector,
4857 feedback_vector = no_reg;
4858
4859 // Compute baseline pc for bytecode offset.
4860 Register get_baseline_pc = r3;
4861 __ Move(get_baseline_pc,
4862 ExternalReference::baseline_pc_for_next_executed_bytecode());
4863
4866
4867 // Get bytecode array from the stack frame.
4870 // Save the accumulator register, since it's clobbered by the below call.
4872 {
4873 __ mov(kCArgRegs[0], code_obj);
4876 FrameScope scope(masm, StackFrame::INTERNAL);
4877 __ PrepareCallCFunction(3, 0);
4878 __ CallCFunction(get_baseline_pc, 3, 0);
4879 }
4880 __ LoadCodeInstructionStart(code_obj, code_obj);
4881 __ add(code_obj, code_obj, kReturnRegister0);
4883
4884 Generate_OSREntry(masm, code_obj);
4885 __ Trap(); // Unreachable.
4886
4887 __ bind(&install_baseline_code);
4888 {
4889 FrameScope scope(masm, StackFrame::INTERNAL);
4891 __ Push(closure);
4892 __ CallRuntime(Runtime::kInstallBaselineCode, 1);
4894 }
4895 // Retry from the start after installing baseline code.
4896 __ b(&start);
4897}
4898
4899void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
4900 // Frame is being dropped:
4901 // - Look up current function on the frame.
4902 // - Leave the frame.
4903 // - Restart the frame by calling the function.
4904
4905 __ ldr(r1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
4906 __ ldr(r0, MemOperand(fp, StandardFrameConstants::kArgCOffset));
4907 __ LeaveFrame(StackFrame::INTERNAL);
4908
4909 // The arguments are already in the stack (including any necessary padding),
4910 // we should not try to massage the arguments again.
4911 __ mov(r2, Operand(kDontAdaptArgumentsSentinel));
4912 __ InvokeFunction(r1, r2, r0, InvokeType::kJump);
4913}
4914
4915#undef __
4916
4917} // namespace internal
4918} // namespace v8
4919
4920#endif // V8_TARGET_ARCH_ARM
#define V8_ENABLE_SANDBOX_BOOL
Definition globals.h:160
int start
int end
bool is_construct
Definition execution.cc:82
int32_t offset
TNode< Context > context
TNode< Object > receiver
TNode< Object > callback
LiftoffRegister reg
int pc_offset
RegListBase< RegisterT > registers
const int length_
Definition mul-fft.cc:473
STL namespace.
int int32_t
Definition unicode.cc:40
constexpr bool IsPowerOfTwo(T value)
Definition bits.h:187
void Free(void *memory)
Definition memory.h:63
Node::Uses::const_iterator begin(const Node::Uses &uses)
Definition node.h:708
ApiCallbackExitFrameConstants FC
Definition frames.cc:1270
FunctionCallbackArguments FCA
Definition frames.cc:1271
void push(LiftoffAssembler *assm, LiftoffRegister reg, ValueKind kind, int padding=0)
constexpr int kStackStateOffset
Definition stacks.h:212
constexpr DoubleRegister kFpReturnRegisters[]
constexpr int kStackSpOffset
Definition stacks.h:202
constexpr int kStackFpOffset
Definition stacks.h:204
constexpr Register kGpParamRegisters[]
constexpr DoubleRegister kFpParamRegisters[]
constexpr int kStackParentOffset
Definition stacks.h:210
uint32_t WasmInterpreterRuntime int64_t r0
constexpr Register kGpReturnRegisters[]
constexpr int kStackLimitOffset
Definition stacks.h:208
constexpr int kStackPcOffset
Definition stacks.h:206
constexpr Register no_reg
constexpr Register kRootRegister
constexpr AddrMode PreIndex
constexpr int kByteSize
Definition globals.h:395
constexpr int kFunctionEntryBytecodeOffset
Definition globals.h:854
constexpr int kPointerSizeLog2
Definition globals.h:600
constexpr BlockAddrMode ia_w
const RegList kCalleeSaved
Definition reglist-arm.h:31
static void Generate_InterpreterEnterBytecode(MacroAssembler *masm)
constexpr ShiftOp LSR
RegListBase< Register > RegList
Definition reglist-arm.h:14
constexpr Register kJavaScriptCallTargetRegister
constexpr int kPointerSize
Definition globals.h:599
constexpr BlockAddrMode db_w
constexpr int kNumberOfRegisters
constexpr ShiftOp ASR
constexpr ShiftOp LSL
constexpr SBit LeaveCC
constexpr uint16_t kDontAdaptArgumentsSentinel
Definition globals.h:2779
constexpr LowDwVfpRegister kLastCalleeSavedDoubleReg
constexpr Register kJavaScriptCallArgCountRegister
constexpr Register kInterpreterAccumulatorRegister
constexpr int kSystemPointerSizeLog2
Definition globals.h:494
InterpreterPushArgsMode
Definition globals.h:2233
constexpr int kJSArgcReceiverSlots
Definition globals.h:2778
static void GenerateInterpreterPushArgs(MacroAssembler *masm, Register num_args, Register start_address, Register scratch)
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler *masm, Register bytecode_array, Register bytecode_offset, Register bytecode, Register scratch1, Register scratch2, Register scratch3, Label *if_return)
MemOperand FieldMemOperand(Register object, int offset)
constexpr int kSystemPointerSize
Definition globals.h:410
static void LeaveInterpreterFrame(MacroAssembler *masm, Register scratch1, Register scratch2)
constexpr Register kReturnRegister1
constexpr int kTaggedSizeLog2
Definition globals.h:543
constexpr Register kReturnRegister0
@ LAST_CALLABLE_JS_FUNCTION_TYPE
@ FIRST_CALLABLE_JS_FUNCTION_TYPE
constexpr Register kWasmImplicitArgRegister
constexpr Register kContextRegister
V8_EXPORT_PRIVATE bool AreAliased(const CPURegister &reg1, const CPURegister &reg2, const CPURegister &reg3=NoReg, const CPURegister &reg4=NoReg, const CPURegister &reg5=NoReg, const CPURegister &reg6=NoReg, const CPURegister &reg7=NoReg, const CPURegister &reg8=NoReg)
constexpr Register kInterpreterDispatchTableRegister
const int kHeapObjectTag
Definition v8-internal.h:72
constexpr LowDwVfpRegister kDoubleRegZero
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr Register kJavaScriptCallExtraArg1Register
const RegList kJSCallerSaved
Definition reglist-arm.h:23
constexpr int JSParameterCount(int param_count_without_receiver)
Definition globals.h:2782
constexpr Register kJavaScriptCallCodeStartRegister
constexpr AddrMode PostIndex
constexpr SBit SetCC
Register ReassignRegister(Register &source)
constexpr Register kWasmCompileLazyFuncIndexRegister
static void AssertCodeIsBaseline(MacroAssembler *masm, Register code, Register scratch)
static void Generate_JSEntryTrampolineHelper(MacroAssembler *masm, bool is_construct)
constexpr LowDwVfpRegister kFirstCalleeSavedDoubleReg
void CallApiFunctionAndReturn(MacroAssembler *masm, bool with_profiling, Register function_address, ExternalReference thunk_ref, Register thunk_arg, int slots_to_drop_on_return, MemOperand *argc_operand, MemOperand return_value_operand)
Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2=no_reg, Register reg3=no_reg, Register reg4=no_reg, Register reg5=no_reg, Register reg6=no_reg)
constexpr Register cp
constexpr Register kCArgRegs[]
constexpr int kDoubleSize
Definition globals.h:407
const int kNumCalleeSaved
Definition reglist-arm.h:48
const int kNumDoubleCalleeSaved
Definition reglist-arm.h:51
static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler *masm, Register sfi, Register bytecode, Register scratch1, Label *is_baseline, Label *is_unavailable)
constexpr Register kInterpreterBytecodeOffsetRegister
constexpr Register kJavaScriptCallNewTargetRegister
constexpr Register kJSFunctionRegister
constexpr Register kInterpreterBytecodeArrayRegister
Definition c-api.cc:87
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define USE(...)
Definition macros.h:293
constexpr T RoundUp(T x, intptr_t m)
Definition macros.h:387
#define arraysize(array)
Definition macros.h:67
#define OFFSET_OF_DATA_START(Type)