V8 — Google's open-source high-performance JavaScript and WebAssembly engine, written in C++.
Documentation listing for file: macro-assembler-loong64.h
1// Copyright 2021 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_CODEGEN_LOONG64_MACRO_ASSEMBLER_LOONG64_H_
6#define V8_CODEGEN_LOONG64_MACRO_ASSEMBLER_LOONG64_H_
7
8#ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
9#error This header must be included via macro-assembler.h
10#endif
11
12#include <optional>
13
16#include "src/common/globals.h"
20
21namespace v8 {
22namespace internal {
23
24// Forward declarations.
25enum class AbortReason : uint8_t;
26
27// Flags used for the li macro-assembler function.
28enum LiFlags {
29 // If the constant value can be represented in just 12 bits, then
30 // optimize the li to use a single instruction, rather than lu12i_w/lu32i_d/
31 // lu52i_d/ori sequence. A number of other optimizations that emit fewer
32 // than the maximum number of instructions exist.
34 // Always use 4 instructions (lu12i_w/ori/lu32i_d/lu52i_d sequence),
35 // even if the constant could be loaded with just one, so that this value is
36 // patchable later.
38 // For address loads only 3 instruction are required. Used to mark
39 // constant load that will be used as address without relocation
40 // information. It ensures predictable code size, so specific sites
41 // in code are patchable.
42 ADDRESS_LOAD = 2
43};
44
46
47Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2 = no_reg,
48 Register reg3 = no_reg,
49 Register reg4 = no_reg,
50 Register reg5 = no_reg,
51 Register reg6 = no_reg);
52
53// -----------------------------------------------------------------------------
54// Static helper functions.
55
56#define SmiWordOffset(offset) (offset + kSystemPointerSize / 2)
57
58// Generate a MemOperand for loading a field from an object.
59inline MemOperand FieldMemOperand(Register object, int offset) {
60 return MemOperand(object, offset - kHeapObjectTag);
61}
62
63class V8_EXPORT_PRIVATE MacroAssembler : public MacroAssemblerBase {
64 public:
65 using MacroAssemblerBase::MacroAssemblerBase;
66
67 // Activation support.
69 void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
70 // Out-of-line constant pool not implemented on loong64.
72 }
74
75 void AllocateStackSpace(Register bytes) { Sub_d(sp, sp, bytes); }
76
77 void AllocateStackSpace(int bytes) {
78 DCHECK_GE(bytes, 0);
79 if (bytes == 0) return;
80 Sub_d(sp, sp, Operand(bytes));
81 }
82
83 // Generates function and stub prologue code.
85 void Prologue();
86
88 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
89 li(kRootRegister, Operand(isolate_root));
90#ifdef V8_COMPRESS_POINTERS
91 LoadRootRelative(kPtrComprCageBaseRegister,
92 IsolateData::cage_base_offset());
93#endif
94 }
95
96 // Jump unconditionally to given label.
97 // Use rather b(Label) for code generation.
98 void jmp(Label* L) { Branch(L); }
99
100 // -------------------------------------------------------------------------
101 // Debugging.
102
103 void Trap();
105
106 // Calls Abort(msg) if the condition cc is not satisfied.
107 // Use --debug_code to enable.
108 void Assert(Condition cc, AbortReason reason, Register rj,
110
111 void AssertJSAny(Register object, Register map_tmp, Register tmp,
113
114 // Like Assert(), but always enabled.
115 void Check(Condition cc, AbortReason reason, Register rj, Operand rk);
116
117 // Same as Check() but expresses that the check is needed for the sandbox.
118 void SbxCheck(Condition cc, AbortReason reason, Register rj, Operand rk);
119
120 // Print a message to stdout and abort execution.
122
124 const Operand& rhs);
125 void Branch(Label* label, bool need_link = false);
126 void Branch(Label* label, Condition cond, Register r1, const Operand& r2,
127 bool need_link = false);
128 void BranchShort(Label* label, Condition cond, Register r1, const Operand& r2,
129 bool need_link = false);
130 void Branch(Label* L, Condition cond, Register rj, RootIndex index,
131 bool need_sign_extend = true);
132
133 void CompareTaggedAndBranch(Label* label, Condition cond, Register r1,
134 const Operand& r2, bool need_link = false);
135
136 // Floating point branches
138 CFRegister cd = FCC0) {
139 CompareF(cmp1, cmp2, cc, cd, true);
140 }
141
143 CFRegister cd = FCC0) {
144 CompareIsNanF(cmp1, cmp2, cd, true);
145 }
146
148 CFRegister cd = FCC0) {
149 CompareF(cmp1, cmp2, cc, cd, false);
150 }
151
153 CFRegister cd = FCC0) {
154 CompareIsNanF(cmp1, cmp2, cd, false);
155 }
156
157 void BranchTrueShortF(Label* target, CFRegister cc = FCC0);
158 void BranchFalseShortF(Label* target, CFRegister cc = FCC0);
159
160 void BranchTrueF(Label* target, CFRegister cc = FCC0);
161 void BranchFalseF(Label* target, CFRegister cc = FCC0);
162
163 static int InstrCountForLi64Bit(int64_t value);
165 void li_optimized(Register rd, Operand j, LiFlags mode = OPTIMIZE_SIZE);
166 void li(Register rd, Operand j, LiFlags mode = OPTIMIZE_SIZE);
167 inline void li(Register rd, int64_t j, LiFlags mode = OPTIMIZE_SIZE) {
168 li(rd, Operand(j), mode);
169 }
170 inline void li(Register rd, int32_t j, LiFlags mode = OPTIMIZE_SIZE) {
171 li(rd, Operand(static_cast<int64_t>(j)), mode);
172 }
174 RelocInfo::Mode rmode = RelocInfo::NO_INFO,
175 LiFlags mode = OPTIMIZE_SIZE);
176 void li(Register dst, ExternalReference value, LiFlags mode = OPTIMIZE_SIZE);
177 void LoadLabelRelative(Register dst, Label* target);
178
179 void LoadFromConstantsTable(Register destination, int constant_index) final;
182 void StoreRootRelative(int32_t offset, Register value) final;
183
184 // Operand pointing to an external reference.
185 // May emit code to set up the scratch register. The operand is
186 // only guaranteed to be correct as long as the scratch register
187 // isn't changed.
188 // If the operand is used more than once, use a scratch register
189 // that is guaranteed not to be clobbered.
191 Register scratch);
193 return ExternalReferenceAsOperand(ExternalReference::Create(id), no_reg);
194 }
195
196 inline void Move(Register output, MemOperand operand) {
197 Ld_d(output, operand);
198 }
199
200 inline void GenPCRelativeJump(Register rd, int64_t offset);
201 inline void GenPCRelativeJumpAndLink(Register rd, int64_t offset);
202
203// Jump, Call, and Ret pseudo instructions implementing inter-working.
204#define COND_ARGS \
205 Condition cond = al, Register rj = zero_reg, \
206 const Operand &rk = Operand(zero_reg)
207
208 // We should not use near calls or jumps for calls to external references,
209 // since the code spaces are not guaranteed to be close to each other.
211 return rmode != RelocInfo::EXTERNAL_REFERENCE;
212 }
213
214 static bool IsNearCallOffset(int64_t offset);
215
216 static int64_t CalculateTargetOffset(Address target, RelocInfo::Mode rmode,
217 uint8_t* pc);
218
219 void Jump(Register target, COND_ARGS);
220 void Jump(intptr_t target, RelocInfo::Mode rmode, COND_ARGS);
221 void Jump(Address target, RelocInfo::Mode rmode, COND_ARGS);
223 void Jump(const ExternalReference& reference);
224 void Call(Register target, COND_ARGS);
225 void Call(Address target, RelocInfo::Mode rmode, COND_ARGS);
226 void Call(Handle<Code> code, RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
227 COND_ARGS);
228 void Call(Label* target);
229
230 // Load the builtin given by the Smi in |builtin_index| into |target|.
231 void LoadEntryFromBuiltinIndex(Register builtin_index, Register target);
234
235 void CallBuiltinByIndex(Register builtin_index, Register target);
236 void CallBuiltin(Builtin builtin);
238 void TailCallBuiltin(Builtin builtin, Condition cond, Register type,
239 Operand range);
240
241 // Load the code entry point from the Code object.
246 JumpMode jump_mode = JumpMode::kJump);
247
248 // Convenience functions to call/jmp to the code of a JSFunction object.
249 // TODO(42204201): These don't work properly with leaptiering as we need to
250 // validate the parameter count at runtime. Instead, we should replace them
251 // with CallJSDispatchEntry that generates a call to a given (compile-time
252 // constant) JSDispatchHandle.
253 void CallJSFunction(Register function_object, uint16_t argument_count);
254 void JumpJSFunction(Register function_object,
255 JumpMode jump_mode = JumpMode::kJump);
256
257#ifdef V8_ENABLE_LEAPTIERING
258 void CallJSDispatchEntry(JSDispatchHandle dispatch_handle,
259 uint16_t argument_count);
260#endif
261#ifdef V8_ENABLE_WEBASSEMBLY
262 void ResolveWasmCodePointer(Register target, uint64_t signature_hash);
263 void CallWasmCodePointer(Register target, uint64_t signature_hash,
264 CallJumpMode call_jump_mode = CallJumpMode::kCall);
265 void CallWasmCodePointerNoSignatureCheck(Register target);
266 void LoadWasmCodePointer(Register dst, MemOperand src);
267#endif
268
269 // Generates an instruction sequence s.t. the return address points to the
270 // instruction following the call.
271 // The return address on the stack is used by frame iteration.
273
274 // TODO(olivf, 42204201) Rename this to AssertNotDeoptimized once
275 // non-leaptiering is removed from the codebase.
277 void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
279 Label* jump_deoptimization_entry_label);
280
282
283 // Emit code to discard a non-negative number of pointer-sized elements
284 // from the stack, clobbering only the sp register.
285 void Drop(int count, Condition cond = cc_always, Register reg = no_reg,
286 const Operand& op = Operand(no_reg));
287
290
291 void Ld_d(Register rd, const MemOperand& rj);
292 void St_d(Register rd, const MemOperand& rj);
293
295 void Push(Tagged<Smi> smi);
296
297 void Push(Register src) {
298 Add_d(sp, sp, Operand(-kSystemPointerSize));
299 St_d(src, MemOperand(sp, 0));
300 }
301
302 // Push two registers. Pushes leftmost register first (to highest address).
303 void Push(Register src1, Register src2) {
304 Sub_d(sp, sp, Operand(2 * kSystemPointerSize));
305 St_d(src1, MemOperand(sp, 1 * kSystemPointerSize));
306 St_d(src2, MemOperand(sp, 0 * kSystemPointerSize));
307 }
308
309 // Push three registers. Pushes leftmost register first (to highest address).
310 void Push(Register src1, Register src2, Register src3) {
311 Sub_d(sp, sp, Operand(3 * kSystemPointerSize));
312 St_d(src1, MemOperand(sp, 2 * kSystemPointerSize));
313 St_d(src2, MemOperand(sp, 1 * kSystemPointerSize));
314 St_d(src3, MemOperand(sp, 0 * kSystemPointerSize));
315 }
316
317 // Push four registers. Pushes leftmost register first (to highest address).
318 void Push(Register src1, Register src2, Register src3, Register src4) {
319 Sub_d(sp, sp, Operand(4 * kSystemPointerSize));
320 St_d(src1, MemOperand(sp, 3 * kSystemPointerSize));
321 St_d(src2, MemOperand(sp, 2 * kSystemPointerSize));
322 St_d(src3, MemOperand(sp, 1 * kSystemPointerSize));
323 St_d(src4, MemOperand(sp, 0 * kSystemPointerSize));
324 }
325
326 // Push five registers. Pushes leftmost register first (to highest address).
327 void Push(Register src1, Register src2, Register src3, Register src4,
328 Register src5) {
329 Sub_d(sp, sp, Operand(5 * kSystemPointerSize));
330 St_d(src1, MemOperand(sp, 4 * kSystemPointerSize));
331 St_d(src2, MemOperand(sp, 3 * kSystemPointerSize));
332 St_d(src3, MemOperand(sp, 2 * kSystemPointerSize));
333 St_d(src4, MemOperand(sp, 1 * kSystemPointerSize));
334 St_d(src5, MemOperand(sp, 0 * kSystemPointerSize));
335 }
336
337 enum PushArrayOrder { kNormal, kReverse };
338 void PushArray(Register array, Register size, Register scratch,
339 Register scratch2, PushArrayOrder order = kNormal);
340
343
345 SaveFPRegsMode fp_mode);
346
348 SaveFPRegsMode fp_mode,
350
352 Register object, Operand offset, SaveFPRegsMode fp_mode,
353 StubCallMode mode = StubCallMode::kCallBuiltinPointer);
355 Register object, Register slot_address, SaveFPRegsMode fp_mode,
356 StubCallMode mode = StubCallMode::kCallBuiltinPointer);
357
358 // For a given |object| and |offset|:
359 // - Move |object| to |dst_object|.
360 // - Compute the address of the slot pointed to by |offset| in |object| and
361 // write it to |dst_slot|.
362 // This method makes sure |object| and |offset| are allowed to overlap with
363 // the destination registers.
364 void MoveObjectAndSlot(Register dst_object, Register dst_slot,
365 Register object, Operand offset);
366
367 // Push multiple registers on the stack.
368 // Registers are saved in numerical order, with higher numbered registers
369 // saved in higher memory addresses.
370 void MultiPush(RegList regs);
371 void MultiPush(RegList regs1, RegList regs2);
372 void MultiPush(RegList regs1, RegList regs2, RegList regs3);
373 void MultiPushFPU(DoubleRegList regs);
374
375 // Calculate how much stack space (in bytes) are required to store caller
376 // registers excluding those specified in the arguments.
378 Register exclusion1 = no_reg,
379 Register exclusion2 = no_reg,
380 Register exclusion3 = no_reg) const;
381
382 // Push caller saved registers on the stack, and return the number of bytes
383 // stack pointer is adjusted.
384 int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
385 Register exclusion2 = no_reg,
386 Register exclusion3 = no_reg);
387 // Restore caller saved registers from the stack, and return the number of
388 // bytes stack pointer is adjusted.
389 int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
390 Register exclusion2 = no_reg,
391 Register exclusion3 = no_reg);
392
393 void Pop(Register dst) {
394 Ld_d(dst, MemOperand(sp, 0));
395 Add_d(sp, sp, Operand(kSystemPointerSize));
396 }
397
398 // Pop two registers. Pops rightmost register first (from lower address).
399 void Pop(Register src1, Register src2) {
400 DCHECK(src1 != src2);
401 Ld_d(src2, MemOperand(sp, 0 * kSystemPointerSize));
402 Ld_d(src1, MemOperand(sp, 1 * kSystemPointerSize));
403 Add_d(sp, sp, 2 * kSystemPointerSize);
404 }
405
406 // Pop three registers. Pops rightmost register first (from lower address).
407 void Pop(Register src1, Register src2, Register src3) {
408 Ld_d(src3, MemOperand(sp, 0 * kSystemPointerSize));
409 Ld_d(src2, MemOperand(sp, 1 * kSystemPointerSize));
410 Ld_d(src1, MemOperand(sp, 2 * kSystemPointerSize));
411 Add_d(sp, sp, 3 * kSystemPointerSize);
412 }
413
414 // Pops multiple values from the stack and load them in the
415 // registers specified in regs. Pop order is the opposite as in MultiPush.
416 void MultiPop(RegList regs);
417 void MultiPop(RegList regs1, RegList regs2);
418 void MultiPop(RegList regs1, RegList regs2, RegList regs3);
419
420 void MultiPopFPU(DoubleRegList regs);
421
422#define DEFINE_INSTRUCTION(instr) \
423 void instr(Register rd, Register rj, const Operand& rk); \
424 void instr(Register rd, Register rj, Register rk) { \
425 instr(rd, rj, Operand(rk)); \
426 } \
427 void instr(Register rj, Register rk, int32_t j) { instr(rj, rk, Operand(j)); }
428
429#define DEFINE_INSTRUCTION2(instr) \
430 void instr(Register rj, const Operand& rk); \
431 void instr(Register rj, Register rk) { instr(rj, Operand(rk)); } \
432 void instr(Register rj, int32_t j) { instr(rj, Operand(j)); }
433
434 DEFINE_INSTRUCTION(Add_w)
435 DEFINE_INSTRUCTION(Add_d)
436 DEFINE_INSTRUCTION(Div_w)
437 DEFINE_INSTRUCTION(Div_wu)
438 DEFINE_INSTRUCTION(Div_du)
439 DEFINE_INSTRUCTION(Mod_w)
440 DEFINE_INSTRUCTION(Mod_wu)
441 DEFINE_INSTRUCTION(Div_d)
442 DEFINE_INSTRUCTION(Sub_w)
443 DEFINE_INSTRUCTION(Sub_d)
444 DEFINE_INSTRUCTION(Mod_d)
445 DEFINE_INSTRUCTION(Mod_du)
446 DEFINE_INSTRUCTION(Mul_w)
447 DEFINE_INSTRUCTION(Mulh_w)
448 DEFINE_INSTRUCTION(Mulh_wu)
449 DEFINE_INSTRUCTION(Mul_d)
450 DEFINE_INSTRUCTION(Mulh_d)
451 DEFINE_INSTRUCTION(Mulh_du)
454 DEFINE_INSTRUCTION2(Div_wu)
455 DEFINE_INSTRUCTION2(Div_du)
456
464
468 DEFINE_INSTRUCTION(Sltiu)
475
476 DEFINE_INSTRUCTION(Rotr_w)
477 DEFINE_INSTRUCTION(Rotr_d)
478
479#undef DEFINE_INSTRUCTION
480#undef DEFINE_INSTRUCTION2
481#undef DEFINE_INSTRUCTION3
482
483 void SmiTag(Register dst, Register src) {
484 static_assert(kSmiTag == 0);
485 if (SmiValuesAre32Bits()) {
486 slli_d(dst, src, 32);
487 } else {
489 add_w(dst, src, src);
490 }
491 }
492
494
495 void SmiUntag(Register dst, const MemOperand& src);
496 void SmiUntag(Register dst, Register src) {
497 if (SmiValuesAre32Bits()) {
498 srai_d(dst, src, kSmiShift);
499 } else {
501 srai_w(dst, src, kSmiShift);
502 }
503 }
504
506
507 // Left-shifted from int32 equivalent of Smi.
508 void SmiScale(Register dst, Register src, int scale) {
509 if (SmiValuesAre32Bits()) {
510 // The int portion is upper 32-bits of 64-bit word.
511 srai_d(dst, src, kSmiShift - scale);
512 } else {
515 slli_w(dst, src, scale - kSmiTagSize);
516 }
517 }
518
519 // On LoongArch64, we should sign-extend 32-bit values.
521 if (v8_flags.enable_slow_asserts) {
522 AssertSmi(smi);
523 }
525 SmiUntag(smi, smi);
526 }
527
528 // Abort execution if argument is a smi, enabled via --debug-code.
531
532 int CalculateStackPassedWords(int num_reg_arguments,
533 int num_double_arguments);
534
535 // Before calling a C-function from generated code, align arguments on stack.
536 // After aligning the frame, non-register arguments must be stored on the
537 // stack, after the argument-slots using helper: CFunctionArgumentOperand().
538 // The argument count assumes all arguments are word sized.
539 // Some compilers/platforms require the stack to be aligned when calling
540 // C++ code.
541 // Needs a scratch register to do some arithmetic. This register will be
542 // trashed.
543 void PrepareCallCFunction(int num_reg_arguments, int num_double_registers,
544 Register scratch);
545 void PrepareCallCFunction(int num_reg_arguments, Register scratch);
546
547 // Calls a C function and cleans up the space for arguments allocated
548 // by PrepareCallCFunction. The called function is not allowed to trigger a
549 // garbage collection, since that might move the code and invalidate the
550 // return address (unless this is somehow accounted for by the called
551 // function).
553 ExternalReference function, int num_arguments,
554 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
555 Label* return_location = nullptr);
557 Register function, int num_arguments,
558 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
559 Label* return_location = nullptr);
561 ExternalReference function, int num_reg_arguments,
562 int num_double_arguments,
563 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
564 Label* return_location = nullptr);
566 Register function, int num_reg_arguments, int num_double_arguments,
567 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
568 Label* return_location = nullptr);
569
570 // See comments at the beginning of Builtins::Generate_CEntry.
571 inline void PrepareCEntryArgs(int num_args) { li(a0, num_args); }
572 inline void PrepareCEntryFunction(const ExternalReference& ref) {
573 li(a1, ref);
574 }
575
576 void CheckPageFlag(Register object, int mask, Condition cc,
577 Label* condition_met);
578#undef COND_ARGS
579
580 // Performs a truncating conversion of a floating point number as used by
581 // the JS bitwise operations. See ECMA-262 9.5: ToInt32.
582 // Exits with 'result' holding the answer.
583 void TruncateDoubleToI(Isolate* isolate, Zone* zone, Register result,
584 DoubleRegister double_input, StubCallMode stub_mode);
585
586 // Conditional move.
587 void Movz(Register rd, Register rj, Register rk);
588 void Movn(Register rd, Register rj, Register rk);
589
592
593 void LoadZeroIfConditionNotZero(Register dest, Register condition);
594 void LoadZeroIfConditionZero(Register dest, Register condition);
595
596 void Clz_w(Register rd, Register rj);
597 void Clz_d(Register rd, Register rj);
598 void Ctz_w(Register rd, Register rj);
599 void Ctz_d(Register rd, Register rj);
602
603 void ExtractBits(Register dest, Register source, Register pos, int size,
604 bool sign_extend = false);
605 void InsertBits(Register dest, Register source, Register pos, int size);
606
607 void Bstrins_w(Register rk, Register rj, uint16_t msbw, uint16_t lswb);
608 void Bstrins_d(Register rk, Register rj, uint16_t msbw, uint16_t lsbw);
609 void Bstrpick_w(Register rk, Register rj, uint16_t msbw, uint16_t lsbw);
610 void Bstrpick_d(Register rk, Register rj, uint16_t msbw, uint16_t lsbw);
611 void Neg_s(FPURegister fd, FPURegister fj);
612 void Neg_d(FPURegister fd, FPURegister fk);
613
614 // Convert single to unsigned word.
617
618 // Change endianness
619 void ByteSwap(Register dest, Register src, int operand_size);
620
621 void Ld_b(Register rd, const MemOperand& rj);
622 void Ld_bu(Register rd, const MemOperand& rj);
623 void St_b(Register rd, const MemOperand& rj);
624
625 void Ld_h(Register rd, const MemOperand& rj);
626 void Ld_hu(Register rd, const MemOperand& rj);
627 void St_h(Register rd, const MemOperand& rj);
628
629 void Ld_w(Register rd, const MemOperand& rj);
630 void Ld_wu(Register rd, const MemOperand& rj);
631 void St_w(Register rd, const MemOperand& rj);
632
633 void Fld_s(FPURegister fd, const MemOperand& src);
634 void Fst_s(FPURegister fj, const MemOperand& dst);
635
636 void Fld_d(FPURegister fd, const MemOperand& src);
637 void Fst_d(FPURegister fj, const MemOperand& dst);
638
639 void Ll_w(Register rd, const MemOperand& rj);
640 void Sc_w(Register rd, const MemOperand& rj);
641
642 void Ll_d(Register rd, const MemOperand& rj);
643 void Sc_d(Register rd, const MemOperand& rj);
644
645 // These functions assume (and assert) that src1!=src2. It is permitted
646 // for the result to alias either input register.
648 Label* out_of_line);
650 Label* out_of_line);
652 Label* out_of_line);
654 Label* out_of_line);
655
656 // Generate out-of-line cases for the macros above.
661
662 bool IsDoubleZeroRegSet() { return has_double_zero_reg_set_; }
663
664 void mov(Register rd, Register rj) { or_(rd, rj, zero_reg); }
665
666 inline void Move(Register dst, Handle<HeapObject> handle) { li(dst, handle); }
667 inline void Move(Register dst, Tagged<Smi> value) { li(dst, Operand(value)); }
668
669 inline void Move(Register dst, Register src) {
670 if (dst != src) {
671 mov(dst, src);
672 }
673 }
675
676 inline void FmoveLow(Register dst_low, FPURegister src) {
677 movfr2gr_s(dst_low, src);
678 }
679
680 void FmoveLow(FPURegister dst, Register src_low);
681
682 inline void Move(FPURegister dst, FPURegister src) { Move_d(dst, src); }
683
684 inline void Move_d(FPURegister dst, FPURegister src) {
685 if (dst != src) {
686 fmov_d(dst, src);
687 }
688 }
689
690 inline void Move_s(FPURegister dst, FPURegister src) {
691 if (dst != src) {
692 fmov_s(dst, src);
693 }
694 }
695
696 void Move(FPURegister dst, float imm) {
697 Move(dst, base::bit_cast<uint32_t>(imm));
698 }
699 void Move(FPURegister dst, double imm) {
700 Move(dst, base::bit_cast<uint64_t>(imm));
701 }
702 void Move(FPURegister dst, uint32_t src);
703 void Move(FPURegister dst, uint64_t src);
704
705 // AddOverflow_d sets the overflow register to a negative value if
706 // overflow occurred; otherwise it is zero or positive.
707 void AddOverflow_d(Register dst, Register left, const Operand& right,
708 Register overflow);
709 // SubOverflow_d sets the overflow register to a negative value if
710 // overflow occurred; otherwise it is zero or positive.
711 void SubOverflow_d(Register dst, Register left, const Operand& right,
712 Register overflow);
713 // MulOverflow_{w/d} set the overflow register to zero if no overflow occurred.
714 void MulOverflow_w(Register dst, Register left, const Operand& right,
715 Register overflow);
716 void MulOverflow_d(Register dst, Register left, const Operand& right,
717 Register overflow);
718
719 // TODO(LOONG_dev): LOONG64 Remove this constant
720 // Number of instructions needed for calculation of switch table entry address
721 static const int kSwitchTablePrologueSize = 5;
722
723 // GetLabelFunction must be lambda '[](size_t index) -> Label*' or a
724 // functor/function with 'Label *func(size_t index)' declaration.
725 template <typename Func>
726 void GenerateSwitchTable(Register index, size_t case_count,
727 Func GetLabelFunction);
728
729 // Load an object from the root table.
732 Register src1, const Operand& src2);
734
737
738 void LoadFeedbackVector(Register dst, Register closure, Register scratch,
739 Label* fbv_undef);
740
741 // If the value is a NaN, canonicalize the value else, do nothing.
742 void FPUCanonicalizeNaN(const DoubleRegister dst, const DoubleRegister src);
743
744 // ---------------------------------------------------------------------------
745 // FPU macros. These do not handle special cases like NaN or +- inf.
746
747 // Convert unsigned word to double.
750
751 // Convert unsigned long to double.
754
755 // Convert unsigned word to float.
758
759 // Convert unsigned long to float.
762
763 // Convert double to unsigned word.
766
767 // Convert single to unsigned word.
770
771 // Convert double to unsigned long.
773 Register result = no_reg);
775 Register result = no_reg);
776
777 // Convert single to unsigned long.
779 Register result = no_reg);
781 Register result = no_reg);
782
783 // Round double functions
788
789 // Round float functions
794
795 // Jump the register contains a smi.
796 void JumpIfSmi(Register value, Label* smi_label);
797
798 void JumpIfEqual(Register a, int32_t b, Label* dest) {
799 UseScratchRegisterScope temps(this);
800 Register scratch = temps.Acquire();
801 li(scratch, Operand(b));
802 Branch(dest, eq, a, Operand(scratch));
803 }
804
805 void JumpIfLessThan(Register a, int32_t b, Label* dest) {
806 UseScratchRegisterScope temps(this);
807 Register scratch = temps.Acquire();
808 li(scratch, Operand(b));
809 Branch(dest, lt, a, Operand(scratch));
810 }
811
812 // Push a standard frame, consisting of ra, fp, context and JS function.
813 void PushStandardFrame(Register function_reg);
814
815 // Get the actual activation frame alignment for target environment.
817
818 // Load Scaled Address instructions. Parameter sa (shift argument) must be
819 // between [1, 31] (inclusive).
820 void Alsl_w(Register rd, Register rj, Register rk, uint8_t sa);
821 void Alsl_d(Register rd, Register rj, Register rk, uint8_t sa);
822
823 // Compute the start of the generated instruction stream from the current PC.
824 // This is an alternative to embedding the {CodeObject} handle as a reference.
826
827 // Control-flow integrity:
828
829 // Define a function entrypoint. This doesn't emit any code for this
830 // architecture, as control-flow integrity is not supported for it.
831 void CodeEntry() {}
832 // Define an exception handler.
834 // Define an exception handler and bind a label.
836
837 // ---------------------------------------------------------------------------
838 // Pointer compression Support
839
840 // Loads a field containing any tagged value and decompresses it if necessary.
841 void LoadTaggedField(Register destination, const MemOperand& field_operand);
842
843 // Loads a field containing a tagged signed value and decompresses it if
844 // necessary.
846 const MemOperand& field_operand);
847
848 // Loads a field containing smi value and untags it.
849 void SmiUntagField(Register dst, const MemOperand& src);
850
851 // Compresses and stores tagged value to given on-heap location.
852 void StoreTaggedField(Register src, const MemOperand& dst);
853
855
857 void DecompressTagged(Register dst, const MemOperand& src);
859 void DecompressTagged(Register dst, Tagged_t immediate);
861 const MemOperand& field_operand);
862
865
866 // ---------------------------------------------------------------------------
867 // V8 Sandbox support
868
869 // Transform a SandboxedPointer from/to its encoded form, which is used when
870 // the pointer is stored on the heap and ensures that the pointer will always
871 // point into the sandbox.
874 MemOperand field_operand);
875 void StoreSandboxedPointerField(Register value, MemOperand dst_field_operand);
876
877 // Loads a field containing an off-heap ("external") pointer and does
878 // necessary decoding if sandbox is enabled.
880 ExternalPointerTagRange tag_range,
881 Register isolate_root = no_reg);
882
883 // Load a trusted pointer field.
884 // When the sandbox is enabled, these are indirect pointers using the trusted
885 // pointer table. Otherwise they are regular tagged fields.
888
889 // Store a trusted pointer field.
890 void StoreTrustedPointerField(Register value, MemOperand dst_field_operand);
891
892 // Load a code pointer field.
893 // These are special versions of trusted pointers that, when the sandbox is
894 // enabled, reference code objects through the code pointer table.
896 LoadTrustedPointerField(destination, field_operand,
897 kCodeIndirectPointerTag);
898 }
899 // Store a code pointer field.
900 void StoreCodePointerField(Register value, MemOperand dst_field_operand) {
901 StoreTrustedPointerField(value, dst_field_operand);
902 }
903
904 // Loads an indirect pointer field.
905 // Only available when the sandbox is enabled, but always visible to avoid
906 // having to place the #ifdefs into the caller.
909
910 // Store an indirect pointer field.
911 // Only available when the sandbox is enabled, but always visible to avoid
912 // having to place the #ifdefs into the caller.
913 void StoreIndirectPointerField(Register value, MemOperand dst_field_operand);
914
915#ifdef V8_ENABLE_SANDBOX
916 // Retrieve the heap object referenced by the given indirect pointer handle,
917 // which can either be a trusted pointer handle or a code pointer handle.
918 void ResolveIndirectPointerHandle(Register destination, Register handle,
920
921 // Retrieve the heap object referenced by the given trusted pointer handle.
922 void ResolveTrustedPointerHandle(Register destination, Register handle,
924 // Retrieve the Code object referenced by the given code pointer handle.
925 void ResolveCodePointerHandle(Register destination, Register handle);
926
927 // Load the pointer to a Code's entrypoint via a code pointer.
928 // Only available when the sandbox is enabled as it requires the code pointer
929 // table.
930 void LoadCodeEntrypointViaCodePointer(Register destination,
931 MemOperand field_operand,
933
934 // Load the value of Code pointer table corresponding to
935 // IsolateGroup::current()->code_pointer_table_.
936 // Only available when the sandbox is enabled.
937 void LoadCodePointerTableBase(Register destination);
938#endif
939
940#ifdef V8_ENABLE_LEAPTIERING
941 void LoadEntrypointFromJSDispatchTable(Register destination,
942 Register dispatch_handle,
943 Register scratch);
944 void LoadEntrypointFromJSDispatchTable(Register destination,
945 JSDispatchHandle dispatch_handle,
946 Register scratch);
947 void LoadParameterCountFromJSDispatchTable(Register destination,
948 Register dispatch_handle,
949 Register scratch);
950 void LoadEntrypointAndParameterCountFromJSDispatchTable(
951 Register entrypoint, Register parameter_count, Register dispatch_handle,
952 Register scratch);
953#endif // V8_ENABLE_LEAPTIERING
954
955 // Load a protected pointer field.
957 MemOperand field_operand);
958
959 // Performs a truncating conversion of a floating point number as used by
960 // the JS bitwise operations. See ECMA-262 9.5: ToInt32. Goes to 'done' if it
961 // succeeds, otherwise falls through if result is saturated. On return
962 // 'result' either holds answer, or is clobbered on fall through.
963 void TryInlineTruncateDoubleToI(Register result, DoubleRegister input,
964 Label* done);
965
966 // It assumes that the arguments are located below the stack pointer.
967 void LoadReceiver(Register dest) { Ld_d(dest, MemOperand(sp, 0)); }
968 void StoreReceiver(Register rec) { St_d(rec, MemOperand(sp, 0)); }
969
970 bool IsNear(Label* L, Condition cond, int rs_reg);
971
972 // Swap two registers. If the scratch register is omitted then a slightly
973 // less efficient form using xor instead of mov is emitted.
974 void Swap(Register reg1, Register reg2, Register scratch = no_reg);
975
977 Register scratch,
978 Condition cond, Label* target);
980
981 void PushRoot(RootIndex index) {
982 UseScratchRegisterScope temps(this);
983 Register scratch = temps.Acquire();
984 LoadRoot(scratch, index);
985 Push(scratch);
986 }
987
988 // Compare the object in a register to a value from the root list.
989 void CompareRootAndBranch(const Register& obj, RootIndex index, Condition cc,
990 Label* target,
992 void CompareTaggedRootAndBranch(const Register& with, RootIndex index,
993 Condition cc, Label* target);
994
  // Compare the object in |with| against the root-list entry at |index| and
  // jump to |if_equal| when they are equal.
  void JumpIfRoot(Register with, RootIndex index, Label* if_equal) {
    Branch(if_equal, eq, with, index);
  }
999
  // Compare the object in |with| against the root-list entry at |index| and
  // jump to |if_not_equal| when they differ.
  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal) {
    Branch(if_not_equal, ne, with, index);
  }
1004
1005 // Checks if value is in range [lower_limit, higher_limit] using a single
1006 // comparison.
1007 void JumpIfIsInRange(Register value, unsigned lower_limit,
1008 unsigned higher_limit, Label* on_in_range);
1009
1010 void JumpIfObjectType(Label* target, Condition cc, Register object,
1011 InstanceType instance_type, Register scratch = no_reg);
1012 // Fast check if the object is a js receiver type. Assumes only primitive
1013 // objects or js receivers are passed.
1015 Register heap_object, Register scratch, Label* target,
1016 Label::Distance distance = Label::kFar,
1017 Condition condition = Condition::kUnsignedGreaterThanEqual);
  // Fast check that |heap_object| is a primitive. This is the inverse of
  // JumpIfJSAnyIsNotPrimitive, implemented by flipping the comparison to
  // kUnsignedLessThan; like that helper it assumes only primitive objects
  // or JS receivers are passed.
  void JumpIfJSAnyIsPrimitive(Register heap_object, Register scratch,
                              Label* target,
                              Label::Distance distance = Label::kFar) {
    return JumpIfJSAnyIsNotPrimitive(heap_object, scratch, target, distance,
                                     Condition::kUnsignedLessThan);
  }
1024
1025 // ---------------------------------------------------------------------------
1026 // GC Support
1027
1028 // Notify the garbage collector that we wrote a pointer into an object.
1029 // |object| is the object being stored into, |value| is the object being
1030 // stored.
1031 // The offset is the offset from the start of the object, not the offset from
1032 // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
1034 Register object, int offset, Register value, RAStatus ra_status,
1035 SaveFPRegsMode save_fp, SmiCheck smi_check = SmiCheck::kInline,
1036 SlotDescriptor slot = SlotDescriptor::ForDirectPointerSlot());
1037
1038 // For a given |object| notify the garbage collector that the slot at |offset|
1039 // has been written. |value| is the object being stored.
1041 Register object, Operand offset, Register value, RAStatus ra_status,
1042 SaveFPRegsMode save_fp, SmiCheck smi_check = SmiCheck::kInline,
1043 SlotDescriptor slot = SlotDescriptor::ForDirectPointerSlot());
1044
1045 // ---------------------------------------------------------------------------
1046 // Pseudo-instructions.
1047
1048 // Convert double to unsigned long.
1050
1055
1060
1061 void Madd_s(FPURegister fd, FPURegister fa, FPURegister fj, FPURegister fk);
1062 void Madd_d(FPURegister fd, FPURegister fa, FPURegister fj, FPURegister fk);
1063 void Msub_s(FPURegister fd, FPURegister fa, FPURegister fj, FPURegister fk);
1064 void Msub_d(FPURegister fd, FPURegister fa, FPURegister fj, FPURegister fk);
1065
1066 // Enter exit frame.
1067 // stack_space - extra stack space.
1068 void EnterExitFrame(Register scratch, int stack_space,
1069 StackFrame::Type frame_type);
1070
1071 // Leave the current exit frame.
1073
1074 // Make sure the stack is aligned. Only emits code in debug mode.
1075 void AssertStackIsAligned() NOOP_UNLESS_DEBUG_CODE;
1076
  // Load the global proxy from the current native context into |dst|.
  void LoadGlobalProxy(Register dst) {
    LoadNativeContextSlot(dst, Context::GLOBAL_PROXY_INDEX);
  }
1081
1082 void LoadNativeContextSlot(Register dst, int index);
1083
1084 // Load the initial map from the global function. The registers
1085 // function and map can be the same, function is then overwritten.
1087 Register scratch);
1088
1089 // -------------------------------------------------------------------------
1090 // JavaScript invokes.
1091
1092 // On function call, call into the debugger.
1095 Register expected_parameter_count_or_dispatch_handle,
1096 Register actual_parameter_count);
1097
1098 // The way we invoke JSFunctions differs depending on whether leaptiering is
1099 // enabled. As such, these functions exist in two variants. In the future,
1100 // leaptiering will be used on all platforms. At that point, the
1101 // non-leaptiering variants will disappear.
1102
1103#ifdef V8_ENABLE_LEAPTIERING
1104 // Invoke the JavaScript function in the given register. Changes the
1105 // current context to the context in the function before invoking.
1106 void InvokeFunction(Register function, Register actual_parameter_count,
1107 InvokeType type,
1108 ArgumentAdaptionMode argument_adaption_mode =
1110 // Invoke the JavaScript function in the given register.
1111 // Changes the current context to the context in the function before invoking.
1112 void InvokeFunctionWithNewTarget(Register function, Register new_target,
1113 Register actual_parameter_count,
1114 InvokeType type);
1115 // Invoke the JavaScript function code by either calling or jumping.
1116 void InvokeFunctionCode(Register function, Register new_target,
1117 Register actual_parameter_count, InvokeType type,
1118 ArgumentAdaptionMode argument_adaption_mode =
1120#else
1121 void InvokeFunction(Register function, Register expected_parameter_count,
1122 Register actual_parameter_count, InvokeType type);
1123 // Invoke the JavaScript function in the given register. Changes the
1124 // current context to the context in the function before invoking.
1126 Register actual_parameter_count,
1127 InvokeType type);
1128 // Invoke the JavaScript function code by either calling or jumping.
1130 Register expected_parameter_count,
1131 Register actual_parameter_count, InvokeType type);
1132#endif
1133
1134 // Exception handling.
1135
1136 // Push a new stack handler and link into stack handler chain.
1138
1139 // Unlink the stack handler on top of the stack from the stack handler chain.
1140 // Must preserve the result register.
1142
1143 // -------------------------------------------------------------------------
1144 // Support functions.
1145
1146 void GetObjectType(Register function, Register map, Register type_reg);
1147
1148 void GetInstanceTypeRange(Register map, Register type_reg,
1149 InstanceType lower_limit, Register range);
1150
1151 // -------------------------------------------------------------------------
1152 // Runtime calls.
1153
1154 // Call a runtime routine.
1155 void CallRuntime(const Runtime::Function* f, int num_arguments);
1156
1157 // Convenience function: Same as above, but takes the fid instead.
1159 const Runtime::Function* function = Runtime::FunctionForId(fid);
1160 CallRuntime(function, function->nargs);
1161 }
1162
  // Convenience function: Same as above, but takes the fid plus an explicit
  // argument count instead of reading nargs from the function descriptor.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments) {
    CallRuntime(Runtime::FunctionForId(fid), num_arguments);
  }
1167
1168 // Convenience function: tail call a runtime routine (jump).
1170
1171 // Jump to the builtin routine.
1173 bool builtin_exit_frame = false);
1174
1175 // ---------------------------------------------------------------------------
1176 // In-place weak references.
1177 void LoadWeakValue(Register out, Register in, Label* target_if_cleared);
1178
1179 // -------------------------------------------------------------------------
1180 // StatsCounter support.
1181
1182 void IncrementCounter(StatsCounter* counter, int value, Register scratch1,
1183 Register scratch2) {
1184 if (!v8_flags.native_code_counters) return;
1185 EmitIncrementCounter(counter, value, scratch1, scratch2);
1186 }
1187 void EmitIncrementCounter(StatsCounter* counter, int value, Register scratch1,
1188 Register scratch2);
1189 void DecrementCounter(StatsCounter* counter, int value, Register scratch1,
1190 Register scratch2) {
1191 if (!v8_flags.native_code_counters) return;
1192 EmitDecrementCounter(counter, value, scratch1, scratch2);
1193 }
1194 void EmitDecrementCounter(StatsCounter* counter, int value, Register scratch1,
1195 Register scratch2);
1196
1197 // -------------------------------------------------------------------------
1198 // Stack limit utilities
1199
1202 void StackOverflowCheck(Register num_args, Register scratch1,
1203 Register scratch2, Label* stack_overflow);
1204
1205 // ---------------------------------------------------------------------------
1206 // Smi utilities.
1207
  // Test if the register contains a smi: leaves |value| & kSmiTagMask in
  // |scratch| for the caller to branch on (with V8's zero smi tag this is
  // zero iff |value| is a smi).
  inline void SmiTst(Register value, Register scratch) {
    And(scratch, value, Operand(kSmiTagMask));
  }
1212
1213 // Jump if the register contains a non-smi.
1214 void JumpIfNotSmi(Register value, Label* not_smi_label);
1215
1216 // Abort execution if argument is not a Constructor, enabled via --debug-code.
1218
1219 // Abort execution if argument is not a JSFunction, enabled via --debug-code.
1221
1222 // Abort execution if argument is not a callable JSFunction, enabled via
1223 // --debug-code.
1225
1226 // Abort execution if argument is not a JSBoundFunction,
1227 // enabled via --debug-code.
1229
1230 // Abort execution if argument is not a JSGeneratorObject (or subclass),
1231 // enabled via --debug-code.
1233
1234 // Like Assert(), but without condition.
1235 // Use --debug_code to enable.
1237
1238 // Abort execution if argument is not undefined or an AllocationSite, enabled
1239 // via --debug-code.
1242
1243 // ---------------------------------------------------------------------------
1244 // Tiering support.
1250 Register closure);
1252
1253#ifndef V8_ENABLE_LEAPTIERING
1255 Register flags, Register feedback_vector, CodeKind current_code_kind,
1256 Label* flags_need_processing);
1258 Register feedback_vector);
1259#endif // !V8_ENABLE_LEAPTIERING
1260
1261 template <typename Field>
1263 Bstrpick_d(dst, src, Field::kShift + Field::kSize - 1, Field::kShift);
1264 }
1265
1266 template <typename Field>
1268 DecodeField<Field>(reg, reg);
1269 }
1270
1271 protected:
1273 inline int32_t GetOffset(Label* L, OffsetSize bits);
1274
1275 private:
1276 bool has_double_zero_reg_set_ = false;
1277
1278 // Helper functions for generating invokes.
1279 void InvokePrologue(Register expected_parameter_count,
1280 Register actual_parameter_count, InvokeType type);
1281
1282 // Performs a truncating conversion of a floating point number as used by
1283 // the JS bitwise operations. See ECMA-262 9.5: ToInt32. Goes to 'done' if it
1284 // succeeds, otherwise falls through if result is saturated. On return
1285 // 'result' either holds answer, or is clobbered on fall through.
1286
1288 const Operand& rk, bool need_link);
1289
1290 // f32 or f64
1292 CFRegister cd, bool f32 = true);
1293
1295 bool f32 = true);
1296
1297 int CallCFunctionHelper(
1298 Register function, int num_reg_arguments, int num_double_arguments,
1299 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
1300 Label* return_location = nullptr);
1301
1303
1305
1306 // Push a fixed frame, consisting of ra, fp.
1307 void PushCommonFrame(Register marker_reg = no_reg);
1308
1310};
1311
1312template <typename Func>
1313void MacroAssembler::GenerateSwitchTable(Register index, size_t case_count,
1314 Func GetLabelFunction) {
1315 UseScratchRegisterScope scope(this);
1316 Register scratch = scope.Acquire();
1317 BlockTrampolinePoolFor(3 + case_count);
1318
1319 pcaddi(scratch, 3);
1320 alsl_d(scratch, index, scratch, kInstrSizeLog2);
1321 jirl(zero_reg, scratch, 0);
1322 for (size_t index = 0; index < case_count; ++index) {
1323 b(GetLabelFunction(index));
1324 }
1325}
1326
1327struct MoveCycleState {
1328 // List of scratch registers reserved for pending moves in a move cycle, and
1329 // which should therefore not be used as a temporary location by
1330 // {MoveToTempLocation}.
1333 // Available scratch registers during the move cycle resolution scope.
1334 std::optional<UseScratchRegisterScope> temps;
1335 // Scratch register picked by {MoveToTempLocation}.
1336 std::optional<Register> scratch_reg;
1337 std::optional<DoubleRegister> scratch_fpreg;
1338};
1339
1340// Provides access to exit frame parameters (GC-ed).
1342 // The slot at [sp] is reserved in all ExitFrames for storing the return
1343 // address before doing the actual call, it's necessary for frame iteration
1344 // (see StoreReturnAddressAndCall for details).
1345 static constexpr int kSPOffset = 1 * kSystemPointerSize;
1346 return MemOperand(sp, kSPOffset + offset);
1347}
1348
1349// Provides access to exit frame parameters (GC-ed).
1353}
1354
1355// Calls an API function. Allocates HandleScope, extracts returned value
1356// from handle and propagates exceptions. Clobbers C argument registers
1357// and C caller-saved registers. Restores context. On return removes
1358// (*argc_operand + slots_to_drop_on_return) * kSystemPointerSize
1359// (GCed, includes the call JS arguments space and the additional space
1360// allocated for the fast call).
1361void CallApiFunctionAndReturn(MacroAssembler* masm, bool with_profiling,
1362 Register function_address,
1363 ExternalReference thunk_ref, Register thunk_arg,
1364 int slots_to_drop_on_return,
1365 MemOperand* argc_operand,
1366 MemOperand return_value_operand);
1367
1368} // namespace internal
1369} // namespace v8
1370
1371#define ACCESS_MASM(masm) masm->
1372
1373#endif // V8_CODEGEN_LOONG64_MACRO_ASSEMBLER_LOONG64_H_
#define Assert(condition)
int16_t parameter_count
Definition builtins.cc:67
interpreter::OperandScale scale
Definition builtins.cc:44
Builtins::Kind kind
Definition builtins.cc:40
SourcePosition pos
void b(int branch_offset, Condition cond=al, RelocInfo::Mode rmode=RelocInfo::NO_INFO)
void alsl_d(Register rd, Register rj, Register rk, int32_t sa2)
void jirl(Register rd, Register rj, int32_t offset)
void BlockTrampolinePoolFor(int instructions)
void pcaddi(Register rd, int32_t si20)
static constexpr int kFixedSlotCountAboveFp
void JumpIfJSAnyIsPrimitive(Register heap_object, Register scratch, Label *target, Label::Distance distance=Label::kFar)
void JumpIfRoot(Register with, RootIndex index, Label *if_equal)
void Push(Register src1, Register src2, Register src3, Register src4, Register src5)
void GenerateSwitchTable(Register index, size_t case_count, Func GetLabelFunction)
void Abort(AbortReason msg)
void Ld_b(Register rd, const MemOperand &rj)
void LoadStackLimit(Register destination, StackLimitKind kind)
void Ftintrp_w_d(FPURegister fd, FPURegister fj)
void Pop(Register src1, Register src2, Register src3)
void CallJSFunction(Register function_object, uint16_t argument_count)
void LiLower32BitHelper(Register rd, Operand j)
void Round_d(FPURegister fd, FPURegister fj)
void St_w(Register rd, const MemOperand &rj)
void AddOverflow_d(Register dst, Register left, const Operand &right, Register overflow)
bool IsNear(Label *L, Condition cond, int rs_reg)
int CallCFunction(ExternalReference function, int num_reg_arguments, int num_double_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_location=nullptr)
void ExtractBits(Register dest, Register source, Register pos, int size, bool sign_extend=false)
void Ffint_d_ul(FPURegister fd, Register rj)
int CallCFunction(Register function, int num_reg_arguments, int num_double_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_location=nullptr)
void MultiPush(RegList regs1, RegList regs2, RegList regs3)
void Move(FPURegister dst, float imm)
void Trunc_uw_s(FPURegister fd, FPURegister fj, FPURegister scratch)
void Ftintrm_l_d(FPURegister fd, FPURegister fj)
void Move(FPURegister dst, FPURegister src)
void CallRuntime(Runtime::FunctionId fid)
int CalculateStackPassedWords(int num_reg_arguments, int num_double_arguments)
void Round_s(FPURegister fd, FPURegister fj)
void Floor_d(FPURegister fd, FPURegister fj)
void Fst_d(FPURegister fj, const MemOperand &dst)
void EnterFrame(StackFrame::Type type)
void mov(Register rd, Register rj)
void Move(FPURegister dst, double imm)
void DecodeField(Register dst, Register src)
void AssertFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void Call(Register target, COND_ARGS)
void Float64Min(FPURegister dst, FPURegister src1, FPURegister src2, Label *out_of_line)
void AssertGeneratorObject(Register object) NOOP_UNLESS_DEBUG_CODE
void CompareIsNanF64(FPURegister cmp1, FPURegister cmp2, CFRegister cd=FCC0)
void JumpIfNotRoot(Register with, RootIndex index, Label *if_not_equal)
void Sc_d(Register rd, const MemOperand &rj)
void li(Register rd, int32_t j, LiFlags mode=OPTIMIZE_SIZE)
void CompareF32(FPURegister cmp1, FPURegister cmp2, FPUCondition cc, CFRegister cd=FCC0)
void LoadEntryFromBuiltin(Builtin builtin, Register destination)
void TestCodeIsMarkedForDeoptimizationAndJump(Register code_data_container, Register scratch, Condition cond, Label *target)
void PushStandardFrame(Register function_reg)
void BranchFalseF(Label *target, CFRegister cc=FCC0)
void RecordWrite(Register object, Operand offset, Register value, RAStatus ra_status, SaveFPRegsMode save_fp, SmiCheck smi_check=SmiCheck::kInline, SlotDescriptor slot=SlotDescriptor::ForDirectPointerSlot())
void LoadZeroIfNotFPUCondition(Register dest, CFRegister=FCC0)
int CallCFunction(ExternalReference function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_location=nullptr)
void Ffint_d_uw(FPURegister fd, Register rj)
void Move(Register dst, Register src)
void DecompressTaggedSigned(Register dst, const MemOperand &src)
void Jump(intptr_t target, RelocInfo::Mode rmode, COND_ARGS)
void CompareIsNanF(FPURegister cmp1, FPURegister cmp2, CFRegister cd, bool f32=true)
void JumpIfEqual(Register a, int32_t b, Label *dest)
void Clz_d(Register rd, Register rj)
void Ffint_s_uw(FPURegister fd, Register rj)
void Bstrpick_w(Register rk, Register rj, uint16_t msbw, uint16_t lsbw)
void Bstrins_d(Register rk, Register rj, uint16_t msbw, uint16_t lsbw)
void StoreReturnAddressAndCall(Register target)
void DecompressTagged(Register dst, Tagged_t immediate)
void Move(Register dst, Tagged< Smi > value)
void Ftintrp_l_d(FPURegister fd, FPURegister fj)
void FmoveLow(FPURegister dst, Register src_low)
void Float64Max(FPURegister dst, FPURegister src1, FPURegister src2, Label *out_of_line)
void LeaveExitFrame(Register scratch)
void AssertFeedbackVector(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void CallBuiltinByIndex(Register builtin_index, Register target)
int32_t GetOffset(Label *L, OffsetSize bits)
void LoadTrustedPointerField(Register destination, MemOperand field_operand, IndirectPointerTag tag)
void MulOverflow_w(Register dst, Register left, const Operand &right, Register overflow)
void Movz(Register rd, Register rj, Register rk)
void LoadRootRelative(Register destination, int32_t offset) final
void JumpIfSmi(Register value, Label *smi_label)
void Ld_d(Register rd, const MemOperand &rj)
void Ftintrz_ul_s(FPURegister fd, FPURegister fj, FPURegister scratch, Register result=no_reg)
static int ActivationFrameAlignment()
void Push(Handle< HeapObject > handle)
void PrepareCallCFunction(int num_reg_arguments, Register scratch)
bool BranchShortOrFallback(Label *L, Condition cond, Register rj, const Operand &rk, bool need_link)
void LoadSandboxedPointerField(Register destination, MemOperand field_operand)
void AssertUnreachable(AbortReason reason) NOOP_UNLESS_DEBUG_CODE
void Alsl_d(Register rd, Register rj, Register rk, uint8_t sa)
void Jump(Register target, COND_ARGS)
void Ll_d(Register rd, const MemOperand &rj)
void LoadRootRegisterOffset(Register destination, intptr_t offset) final
void Ftintrz_ul_s(Register rd, FPURegister fj, FPURegister scratch, Register result=no_reg)
void Ld_bu(Register rd, const MemOperand &rj)
void AssertSmi(Register object) NOOP_UNLESS_DEBUG_CODE
void AtomicDecompressTagged(Register dst, const MemOperand &src)
void BranchFalseShortF(Label *target, CFRegister cc=FCC0)
void SmiUntag(Register dst, Register src)
void Alsl_w(Register rd, Register rj, Register rk, uint8_t sa)
void Ftintrz_ul_d(FPURegister fd, FPURegister fj, FPURegister scratch, Register result=no_reg)
void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(Register flags, Register feedback_vector, CodeKind current_code_kind, Label *flags_need_processing)
void LoadFeedbackVector(Register dst, Register closure, Register scratch, Label *fbv_undef)
void EmitIncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
int CallCFunction(Register function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_location=nullptr)
void TailCallBuiltin(Builtin builtin)
void LoadGlobalFunctionInitialMap(Register function, Register map, Register scratch)
void Movn(Register rd, Register rj, Register rk)
void BranchTrueF(Label *target, CFRegister cc=FCC0)
void InvokeFunctionCode(Register function, Register new_target, Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void Fld_d(FPURegister fd, const MemOperand &src)
void Ll_w(Register rd, const MemOperand &rj)
void DecodeSandboxedPointer(Register value)
void Branch(Label *L, Condition cond, Register rj, RootIndex index, bool need_sign_extend=true)
int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg) const
void Move_d(FPURegister dst, FPURegister src)
void Jump(const ExternalReference &reference)
void JumpIfJSAnyIsNotPrimitive(Register heap_object, Register scratch, Label *target, Label::Distance distance=Label::kFar, Condition condition=Condition::kUnsignedGreaterThanEqual)
MemOperand ExternalReferenceAsOperand(IsolateFieldId id)
void Ffint_s_ul(FPURegister fd, Register rj)
void LoadCodeInstructionStart(Register destination, Register code_object, CodeEntrypointTag tag)
bool CanUseNearCallOrJump(RelocInfo::Mode rmode)
void InvokePrologue(Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void RoundFloat(FPURegister dst, FPURegister src, FPURoundingMode mode)
void Ftintrne_l_d(FPURegister fd, FPURegister fj)
void Push(Register src1, Register src2, Register src3)
void Call(Address target, RelocInfo::Mode rmode, COND_ARGS)
void Ld_h(Register rd, const MemOperand &rj)
void Ctz_w(Register rd, Register rj)
void Ftintrz_l_ud(FPURegister fd, FPURegister fj, FPURegister scratch)
void Move(Register dst, Handle< HeapObject > handle)
void Ffint_s_uw(FPURegister fd, FPURegister fj)
void EnterExitFrame(Register scratch, int stack_space, StackFrame::Type frame_type)
void SmiTag(Register dst, Register src)
void Clz_w(Register rd, Register rj)
void CompareWord(Condition cond, Register dst, Register lhs, const Operand &rhs)
void Fst_s(FPURegister fj, const MemOperand &dst)
MemOperand ExternalReferenceAsOperand(ExternalReference reference, Register scratch)
void CallBuiltin(Builtin builtin)
void IncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void Move(FPURegister dst, uint64_t src)
void PushCommonFrame(Register marker_reg=no_reg)
void LoadIndirectPointerField(Register destination, MemOperand field_operand, IndirectPointerTag tag)
void Trunc_d(FPURegister fd, FPURegister fj)
void CallIndirectPointerBarrier(Register object, Operand offset, SaveFPRegsMode fp_mode, IndirectPointerTag tag)
void RoundDouble(FPURegister dst, FPURegister src, FPURoundingMode mode)
void LeaveFrame(StackFrame::Type type)
void Ftintrz_ul_d(Register rd, FPURegister fj, FPURegister scratch, Register result=no_reg)
int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg)
void Ftintrz_w_d(FPURegister fd, FPURegister fj)
void Ffint_d_ul(FPURegister fd, FPURegister fj)
void JumpToExternalReference(const ExternalReference &builtin, bool builtin_exit_frame=false)
void Ftintrm_w_d(FPURegister fd, FPURegister fj)
void Float64MaxOutOfLine(FPURegister dst, FPURegister src1, FPURegister src2)
Operand ClearedValue() const
void StackOverflowCheck(Register num_args, Register scratch1, Register scratch2, Label *stack_overflow)
void St_d(Register rd, const MemOperand &rj)
void Ceil_s(FPURegister fd, FPURegister fj)
void StoreCodePointerField(Register value, MemOperand dst_field_operand)
void Ffint_d_uw(FPURegister fd, FPURegister fj)
void ByteSwap(Register dest, Register src, int operand_size)
void Trunc_s(FPURegister fd, FPURegister fj)
void GenPCRelativeJumpAndLink(Register rd, int64_t offset)
void Sc_w(Register rd, const MemOperand &rj)
void InvokeFunctionWithNewTarget(Register function, Register new_target, Register actual_parameter_count, InvokeType type)
void li(Register rd, int64_t j, LiFlags mode=OPTIMIZE_SIZE)
void FmoveLow(Register dst_low, FPURegister src)
void LoadTaggedField(Register destination, const MemOperand &field_operand)
void LoadRoot(Register destination, RootIndex index) final
void MultiPop(RegList regs1, RegList regs2)
void DecompressProtected(const Register &destination, const MemOperand &field_operand)
void li(Register dst, Handle< HeapObject > value, RelocInfo::Mode rmode=RelocInfo::NO_INFO, LiFlags mode=OPTIMIZE_SIZE)
void Pop(Register src1, Register src2)
void Float64MinOutOfLine(FPURegister dst, FPURegister src1, FPURegister src2)
void InvokeFunction(Register function, Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void RecordWriteField(Register object, int offset, Register value, RAStatus ra_status, SaveFPRegsMode save_fp, SmiCheck smi_check=SmiCheck::kInline, SlotDescriptor slot=SlotDescriptor::ForDirectPointerSlot())
void LoadExternalPointerField(Register destination, MemOperand field_operand, ExternalPointerTagRange tag_range, Register isolate_root=no_reg)
void Float32MinOutOfLine(FPURegister dst, FPURegister src1, FPURegister src2)
void AtomicDecompressTaggedSigned(Register dst, const MemOperand &src)
void Ftintrz_l_d(FPURegister fd, FPURegister fj)
void Float32Min(FPURegister dst, FPURegister src1, FPURegister src2, Label *out_of_line)
void PushArray(Register array, Register size, Register scratch, Register scratch2, PushArrayOrder order=kNormal)
void DecrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void Ftintrne_w_d(FPURegister fd, FPURegister fj)
void Call(Handle< Code > code, RelocInfo::Mode rmode=RelocInfo::CODE_TARGET, COND_ARGS)
void CallCodeObject(Register code_object, CodeEntrypointTag tag)
void Bstrpick_d(Register rk, Register rj, uint16_t msbw, uint16_t lsbw)
void AtomicStoreTaggedField(Register dst, const MemOperand &src)
void EmitDecrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void LoadFromConstantsTable(Register destination, int constant_index) final
void LoadProtectedPointerField(Register destination, MemOperand field_operand)
MemOperand EntryFromBuiltinAsOperand(Builtin builtin)
void Trunc_uw_s(Register rd, FPURegister fj, FPURegister scratch)
void AssertNotSmi(Register object) NOOP_UNLESS_DEBUG_CODE
void PrepareCEntryFunction(const ExternalReference &ref)
void ComputeCodeStartAddress(Register dst)
void MaybeSaveRegisters(RegList registers)
void CheckPageFlag(Register object, int mask, Condition cc, Label *condition_met)
void LoadTaggedRoot(Register destination, RootIndex index)
DISALLOW_IMPLICIT_CONSTRUCTORS(MacroAssembler)
void Jump(Handle< Code > code, RelocInfo::Mode rmode, COND_ARGS)
static int64_t CalculateTargetOffset(Address target, RelocInfo::Mode rmode, uint8_t *pc)
void JumpIfNotSmi(Register value, Label *not_smi_label)
void StoreIndirectPointerField(Register value, MemOperand dst_field_operand)
void SmiTst(Register value, Register scratch)
void JumpJSFunction(Register function_object, JumpMode jump_mode=JumpMode::kJump)
Register GetRkAsRegisterHelper(const Operand &rk, Register scratch)
void DecompressTagged(Register dst, const MemOperand &src)
void MoveObjectAndSlot(Register dst_object, Register dst_slot, Register object, Operand offset)
void AssertConstructor(Register object) NOOP_UNLESS_DEBUG_CODE
void LoadCompressedMap(Register dst, Register object)
void CallRuntime(const Runtime::Function *f, int num_arguments)
void LoadWeakValue(Register out, Register in, Label *target_if_cleared)
void Ceil_d(FPURegister fd, FPURegister fj)
void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg)
void GenerateTailCallToReturnedCode(Runtime::FunctionId function_id)
void LoadCodePointerField(Register destination, MemOperand field_operand)
void AssertJSAny(Register object, Register map_tmp, Register tmp, AbortReason abort_reason) NOOP_UNLESS_DEBUG_CODE
void Push(Tagged< Smi > smi)
void CallEphemeronKeyBarrier(Register object, Operand offset, SaveFPRegsMode fp_mode)
void Float32MaxOutOfLine(FPURegister dst, FPURegister src1, FPURegister src2)
void Float32Max(FPURegister dst, FPURegister src1, FPURegister src2, Label *out_of_line)
void AssertFeedbackCell(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void JumpCodeObject(Register code_object, CodeEntrypointTag tag, JumpMode jump_mode=JumpMode::kJump)
void CallForDeoptimization(Builtin target, int deopt_id, Label *exit, DeoptimizeKind kind, Label *ret, Label *jump_deoptimization_entry_label)
void Ffint_s_ul(FPURegister fd, FPURegister fj)
void StoreTrustedPointerField(Register value, MemOperand dst_field_operand)
void Call(Label *target)
void DropArgumentsAndPushNewReceiver(Register argc, Register receiver)
void StoreSandboxedPointerField(Register value, MemOperand dst_field_operand)
void LoadEntryFromBuiltinIndex(Register builtin_index, Register target)
void Branch(Label *label, Condition cond, Register r1, const Operand &r2, bool need_link=false)
void JumpIfLessThan(Register a, int32_t b, Label *dest)
void MulOverflow_d(Register dst, Register left, const Operand &right, Register overflow)
static bool IsNearCallOffset(int64_t offset)
void Move(Register output, MemOperand operand)
void LoadLabelRelative(Register dst, Label *target)
void Ctz_d(Register rd, Register rj)
void ReplaceClosureCodeWithOptimizedCode(Register optimized_code, Register closure)
void DecompressTagged(Register dst, Register src)
void Push(Register src1, Register src2, Register src3, Register src4)
void Popcnt_d(Register rd, Register rj)
void Jump(Address target, RelocInfo::Mode rmode, COND_ARGS)
void AssertBoundFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void CallRecordWriteStubSaveRegisters(Register object, Operand offset, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void Branch(Label *label, bool need_link=false)
void Ld_hu(Register rd, const MemOperand &rj)
void LoadTaggedSignedField(Register destination, const MemOperand &field_operand)
void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, Register feedback_vector)
void LoadIsolateField(Register dst, IsolateFieldId id)
void LoadRoot(Register destination, RootIndex index, Condition cond, Register src1, const Operand &src2)
void Ftintrz_uw_s(FPURegister fd, FPURegister fs, FPURegister scratch)
void LoadZeroIfFPUCondition(Register dest, CFRegister=FCC0)
void CompareIsNanF32(FPURegister cmp1, FPURegister cmp2, CFRegister cd=FCC0)
void Move_s(FPURegister dst, FPURegister src)
void BranchTrueShortF(Label *target, CFRegister cc=FCC0)
void MaybeRestoreRegisters(RegList registers)
void CallRecordWriteStub(Register object, Register slot_address, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void CallDebugOnFunctionCall(Register fun, Register new_target, Register expected_parameter_count_or_dispatch_handle, Register actual_parameter_count)
void Popcnt_w(Register rd, Register rj)
void PrepareCallCFunction(int num_reg_arguments, int num_double_registers, Register scratch)
void Fld_s(FPURegister fd, const MemOperand &src)
void St_b(Register rd, const MemOperand &rj)
void StoreTaggedField(Register src, const MemOperand &dst)
void MultiPop(RegList regs1, RegList regs2, RegList regs3)
int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg)
void AssertUndefinedOrAllocationSite(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void Bstrins_w(Register rk, Register rj, uint16_t msbw, uint16_t lswb)
void SmiUntagField(Register dst, const MemOperand &src)
void StubPrologue(StackFrame::Type type)
void Ftintrz_uw_d(Register rd, FPURegister fj, FPURegister scratch)
void St_h(Register rd, const MemOperand &rj)
void StoreRootRelative(int32_t offset, Register value) final
void Move(FPURegister dst, uint32_t src)
void Push(Register src1, Register src2)
void LoadMap(Register destination, Register object)
void SmiUntag(Register dst, const MemOperand &src)
void CompareF64(FPURegister cmp1, FPURegister cmp2, FPUCondition cc, CFRegister cd=FCC0)
void TailCallRuntime(Runtime::FunctionId fid)
void SubOverflow_d(Register dst, Register left, const Operand &right, Register overflow)
void CallRuntime(Runtime::FunctionId fid, int num_arguments)
void Floor_s(FPURegister fd, FPURegister fj)
void LoadNativeContextSlot(Register dst, int index)
void MultiPush(RegList regs1, RegList regs2)
void CompareF(FPURegister cmp1, FPURegister cmp2, FPUCondition cc, CFRegister cd, bool f32=true)
void Ftintrz_uw_d(FPURegister fd, FPURegister fj, FPURegister scratch)
void Ld_wu(Register rd, const MemOperand &rj)
void Ftintrz_uw_s(Register rd, FPURegister fs, FPURegister scratch)
void DropArguments(Register count)
void Ld_w(Register rd, const MemOperand &rj)
void AssertCallableFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void GenPCRelativeJump(Register rd, int64_t offset)
void SmiScale(Register dst, Register src, int scale)
#define NOOP_UNLESS_DEBUG_CODE
Definition assembler.h:628
DirectHandle< Object > new_target
Definition execution.cc:75
Label label
Isolate * isolate
int32_t offset
TNode< Object > receiver
ZoneVector< RpoNumber > & result
LiftoffRegister reg
uint32_t const mask
#define DEFINE_INSTRUCTION2(instr)
#define COND_ARGS
#define DEFINE_INSTRUCTION(instr)
SmiCheck
ComparisonMode
ArgumentAdaptionMode
InvokeType
SetIsolateDataSlots
JumpMode
RegListBase< RegisterT > registers
InstructionOperand destination
constexpr Register no_reg
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
constexpr Register kRootRegister
MemOperand ExitFrameCallerStackSlotOperand(int index)
const int kSmiTagSize
Definition v8-internal.h:87
Address Tagged_t
Definition globals.h:547
constexpr int L
constexpr int kSmiShift
MemOperand FieldMemOperand(Register object, int offset)
constexpr uint8_t kInstrSizeLog2
constexpr int kSystemPointerSize
Definition globals.h:410
constexpr bool SmiValuesAre31Bits()
const int kHeapObjectTag
Definition v8-internal.h:72
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr bool SmiValuesAre32Bits()
constexpr Register kPtrComprCageBaseRegister
const intptr_t kSmiTagMask
Definition v8-internal.h:88
void CallApiFunctionAndReturn(MacroAssembler *masm, bool with_profiling, Register function_address, ExternalReference thunk_ref, Register thunk_arg, int slots_to_drop_on_return, MemOperand *argc_operand, MemOperand return_value_operand)
const int kSmiTag
Definition v8-internal.h:86
Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2=no_reg, Register reg3=no_reg, Register reg4=no_reg, Register reg5=no_reg, Register reg6=no_reg)
MemOperand ExitFrameStackSlotOperand(int offset)
#define UNREACHABLE()
Definition logging.h:67
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define V8_EXPORT_PRIVATE
Definition macros.h:460
std::optional< Register > scratch_reg
std::optional< DoubleRegister > scratch_fpreg
std::optional< UseScratchRegisterScope > temps