v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
macro-assembler-ppc.h
Go to the documentation of this file.
1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_CODEGEN_PPC_MACRO_ASSEMBLER_PPC_H_
6#define V8_CODEGEN_PPC_MACRO_ASSEMBLER_PPC_H_
7
8#ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
9#error This header must be included via macro-assembler.h
10#endif
11
16#include "src/common/globals.h"
20
21namespace v8 {
22namespace internal {
23
25
26// ----------------------------------------------------------------------------
27// Static helper functions
28
// Generate a MemOperand for loading a field from an object.
// Field offsets are given relative to the tagged heap-object pointer, so
// kHeapObjectTag is subtracted to address the raw memory location.
inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}
33
35
36Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2 = no_reg,
37 Register reg3 = no_reg,
38 Register reg4 = no_reg,
39 Register reg5 = no_reg,
40 Register reg6 = no_reg);
41
42// These exist to provide portability between 32 and 64bit
43#define ClearLeftImm clrldi
44#define ClearRightImm clrrdi
45
46class V8_EXPORT_PRIVATE MacroAssembler : public MacroAssemblerBase {
47 public:
48 using MacroAssemblerBase::MacroAssemblerBase;
49
50 void CallBuiltin(Builtin builtin, Condition cond = al);
51 void TailCallBuiltin(Builtin builtin, Condition cond = al,
52 CRegister cr = cr0);
53 void Popcnt32(Register dst, Register src);
54 void Popcnt64(Register dst, Register src);
55 // Converts the integer (untagged smi) in |src| to a double, storing
56 // the result to |dst|
58
59 // Converts the unsigned integer (untagged smi) in |src| to
60 // a double, storing the result to |dst|
62
63 // Converts the integer (untagged smi) in |src| to
64 // a float, storing the result in |dst|
66
67 // Converts the unsigned integer (untagged smi) in |src| to
68 // a float, storing the result in |dst|
70
75
76 // Converts the double_input to an integer. Note that, upon return,
77 // the contents of double_dst will also hold the fixed point representation.
78 void ConvertDoubleToInt64(const DoubleRegister double_input,
79 const Register dst, const DoubleRegister double_dst,
80 FPRoundingMode rounding_mode = kRoundToZero);
81
82 // Converts the double_input to an unsigned integer. Note that, upon return,
83 // the contents of double_dst will also hold the fixed point representation.
85 const DoubleRegister double_input, const Register dst,
86 const DoubleRegister double_dst,
87 FPRoundingMode rounding_mode = kRoundToZero);
88
89 // Activation support.
91 bool load_constant_pool_pointer_reg = false);
92
93 // Returns the pc offset at which the frame ends.
94 int LeaveFrame(StackFrame::Type type, int stack_adjustment = 0);
95
96 void AllocateStackSpace(int bytes) {
97 DCHECK_GE(bytes, 0);
98 if (bytes == 0) return;
99 AddS64(sp, sp, Operand(-bytes), r0);
100 }
101
102 void AllocateStackSpace(Register bytes) { sub(sp, sp, bytes); }
103
104 // Push a fixed frame, consisting of lr, fp, constant pool.
105 void PushCommonFrame(Register marker_reg = no_reg);
106
107 // Generates function and stub prologue code.
109 void Prologue();
110
113
114 // Push a standard frame, consisting of lr, fp, constant pool,
115 // context and JS function
116 void PushStandardFrame(Register function_reg);
117
118 // Restore caller's frame pointer and return address prior to being
119 // overwritten by tail call stack preparation.
121
122 // Get the actual activation frame alignment for target environment.
124
126 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
127 mov(kRootRegister, Operand(isolate_root));
128#ifdef V8_COMPRESS_POINTERS
129 LoadRootRelative(kPtrComprCageBaseRegister,
130 IsolateData::cage_base_offset());
131#endif
132 }
133
135 Register scratch);
136
137 // load a literal signed int value <value> to GPR <dst>
138 void LoadIntLiteral(Register dst, int value);
139 // load an SMI value <value> to GPR <dst>
141
142 void LoadPC(Register dst);
144
145 void CmpS64(Register src1, const Operand& src2, Register scratch,
146 CRegister cr = cr0);
147 void CmpS64(Register src1, Register src2, CRegister cr = cr0);
148 void CmpU64(Register src1, const Operand& src2, Register scratch,
149 CRegister cr = cr0);
150 void CmpU64(Register src1, Register src2, CRegister cr = cr0);
151 void CmpS32(Register src1, const Operand& src2, Register scratch,
152 CRegister cr = cr0);
153 void CmpS32(Register src1, Register src2, CRegister cr = cr0);
154 void CmpU32(Register src1, const Operand& src2, Register scratch,
155 CRegister cr = cr0);
156 void CmpU32(Register src1, Register src2, CRegister cr = cr0);
157 void CompareTagged(Register src1, Register src2, CRegister cr = cr0) {
159 CmpS32(src1, src2, cr);
160 } else {
161 CmpS64(src1, src2, cr);
162 }
163 }
164
166 DoubleRegister scratch = kScratchDoubleReg);
168 DoubleRegister scratch = kScratchDoubleReg);
169
170 // Set new rounding mode RN to FPSCR
172
173 // reset rounding mode to default (kRoundToNearest)
175
176 void AddS64(Register dst, Register src, const Operand& value,
177 Register scratch = r0, OEBit s = LeaveOE, RCBit r = LeaveRC);
178 void AddS64(Register dst, Register src, Register value, OEBit s = LeaveOE,
179 RCBit r = LeaveRC);
180 void SubS64(Register dst, Register src, const Operand& value,
181 Register scratch = r0, OEBit s = LeaveOE, RCBit r = LeaveRC);
182 void SubS64(Register dst, Register src, Register value, OEBit s = LeaveOE,
183 RCBit r = LeaveRC);
184 void AddS32(Register dst, Register src, const Operand& value,
185 Register scratch = r0, RCBit r = LeaveRC);
186 void AddS32(Register dst, Register src, Register value, RCBit r = LeaveRC);
187 void SubS32(Register dst, Register src, const Operand& value,
188 Register scratch = r0, RCBit r = LeaveRC);
189 void SubS32(Register dst, Register src, Register value, RCBit r = LeaveRC);
190 void MulS64(Register dst, Register src, const Operand& value,
191 Register scratch = r0, OEBit s = LeaveOE, RCBit r = LeaveRC);
192 void MulS64(Register dst, Register src, Register value, OEBit s = LeaveOE,
193 RCBit r = LeaveRC);
194 void MulS32(Register dst, Register src, const Operand& value,
195 Register scratch = r0, OEBit s = LeaveOE, RCBit r = LeaveRC);
196 void MulS32(Register dst, Register src, Register value, OEBit s = LeaveOE,
197 RCBit r = LeaveRC);
198 void DivS64(Register dst, Register src, Register value, OEBit s = LeaveOE,
199 RCBit r = LeaveRC);
200 void DivU64(Register dst, Register src, Register value, OEBit s = LeaveOE,
201 RCBit r = LeaveRC);
202 void DivS32(Register dst, Register src, Register value, OEBit s = LeaveOE,
203 RCBit r = LeaveRC);
204 void DivU32(Register dst, Register src, Register value, OEBit s = LeaveOE,
205 RCBit r = LeaveRC);
206 void ModS64(Register dst, Register src, Register value);
207 void ModU64(Register dst, Register src, Register value);
208 void ModS32(Register dst, Register src, Register value);
209 void ModU32(Register dst, Register src, Register value);
210
211 void AndU64(Register dst, Register src, const Operand& value,
212 Register scratch = r0, RCBit r = SetRC);
213 void AndU64(Register dst, Register src, Register value, RCBit r = SetRC);
214 void OrU64(Register dst, Register src, const Operand& value,
215 Register scratch = r0, RCBit r = SetRC);
216 void OrU64(Register dst, Register src, Register value, RCBit r = LeaveRC);
217 void XorU64(Register dst, Register src, const Operand& value,
218 Register scratch = r0, RCBit r = SetRC);
219 void XorU64(Register dst, Register src, Register value, RCBit r = LeaveRC);
220 void AndU32(Register dst, Register src, const Operand& value,
221 Register scratch = r0, RCBit r = SetRC);
222 void AndU32(Register dst, Register src, Register value, RCBit r = SetRC);
223 void OrU32(Register dst, Register src, const Operand& value,
224 Register scratch = r0, RCBit r = SetRC);
225 void OrU32(Register dst, Register src, Register value, RCBit r = LeaveRC);
226 void XorU32(Register dst, Register src, const Operand& value,
227 Register scratch = r0, RCBit r = SetRC);
228 void XorU32(Register dst, Register src, Register value, RCBit r = LeaveRC);
229
230 void ShiftLeftU64(Register dst, Register src, const Operand& value,
231 RCBit r = LeaveRC);
232 void ShiftRightU64(Register dst, Register src, const Operand& value,
233 RCBit r = LeaveRC);
234 void ShiftRightS64(Register dst, Register src, const Operand& value,
235 RCBit r = LeaveRC);
236 void ShiftLeftU32(Register dst, Register src, const Operand& value,
237 RCBit r = LeaveRC);
238 void ShiftRightU32(Register dst, Register src, const Operand& value,
239 RCBit r = LeaveRC);
240 void ShiftRightS32(Register dst, Register src, const Operand& value,
241 RCBit r = LeaveRC);
243 RCBit r = LeaveRC);
245 RCBit r = LeaveRC);
247 RCBit r = LeaveRC);
249 RCBit r = LeaveRC);
251 RCBit r = LeaveRC);
253 RCBit r = LeaveRC);
254
255 void CountLeadingZerosU32(Register dst, Register src, RCBit r = LeaveRC);
256 void CountLeadingZerosU64(Register dst, Register src, RCBit r = LeaveRC);
257 void CountTrailingZerosU32(Register dst, Register src, Register scratch1 = ip,
258 Register scratch2 = r0, RCBit r = LeaveRC);
259 void CountTrailingZerosU64(Register dst, Register src, Register scratch1 = ip,
260 Register scratch2 = r0, RCBit r = LeaveRC);
261
262 void ClearByteU64(Register dst, int byte_idx);
263 void ReverseBitsU64(Register dst, Register src, Register scratch1,
264 Register scratch2);
265 void ReverseBitsU32(Register dst, Register src, Register scratch1,
266 Register scratch2);
268 Register scratch1, Register scratch2,
269 int byte_idx);
270
272 RCBit r = LeaveRC);
274 RCBit r = LeaveRC);
276 RCBit r = LeaveRC);
278 RCBit r = LeaveRC);
280 RCBit r = LeaveRC);
282 RCBit r = LeaveRC);
284 RCBit r = LeaveRC);
286 RCBit r = LeaveRC);
288 RCBit r = LeaveRC);
289
290 template <class _type>
291 void SignedExtend(Register dst, Register value) {
292 switch (sizeof(_type)) {
293 case 1:
294 extsb(dst, value);
295 break;
296 case 2:
297 extsh(dst, value);
298 break;
299 case 4:
300 extsw(dst, value);
301 break;
302 case 8:
303 if (dst != value) mr(dst, value);
304 break;
305 default:
306 UNREACHABLE();
307 }
308 }
309
310 template <class _type>
311 void ZeroExtend(Register dst, Register value) {
312 switch (sizeof(_type)) {
313 case 1:
314 ZeroExtByte(dst, value);
315 break;
316 case 2:
317 ZeroExtHalfWord(dst, value);
318 break;
319 case 4:
320 ZeroExtWord32(dst, value);
321 break;
322 case 8:
323 if (dst != value) mr(dst, value);
324 break;
325 default:
326 UNREACHABLE();
327 }
328 }
329 template <class _type>
330 void ExtendValue(Register dst, Register value) {
331 if (std::is_signed<_type>::value) {
332 SignedExtend<_type>(dst, value);
333 } else {
334 ZeroExtend<_type>(dst, value);
335 }
336 }
337
338 template <class _type>
339 void LoadReserve(Register output, MemOperand dst) {
340 switch (sizeof(_type)) {
341 case 1:
342 lbarx(output, dst);
343 break;
344 case 2:
345 lharx(output, dst);
346 break;
347 case 4:
348 lwarx(output, dst);
349 break;
350 case 8:
351 ldarx(output, dst);
352 break;
353 default:
354 UNREACHABLE();
355 }
356 if (std::is_signed<_type>::value) {
357 SignedExtend<_type>(output, output);
358 }
359 }
360
361 template <class _type>
363 switch (sizeof(_type)) {
364 case 1:
365 stbcx(value, dst);
366 break;
367 case 2:
368 sthcx(value, dst);
369 break;
370 case 4:
371 stwcx(value, dst);
372 break;
373 case 8:
374 stdcx(value, dst);
375 break;
376 default:
377 UNREACHABLE();
378 }
379 }
380
381 template <class _type>
383 Register new_value, Register output,
384 Register scratch) {
385 Label loop;
386 Label exit;
387 if (sizeof(_type) != 8) {
388 ExtendValue<_type>(scratch, old_value);
389 old_value = scratch;
390 }
391 lwsync();
392 bind(&loop);
393 LoadReserve<_type>(output, dst);
394 cmp(output, old_value, cr0);
395 bne(&exit, cr0);
396 StoreConditional<_type>(new_value, dst);
397 bne(&loop, cr0);
398 bind(&exit);
399 sync();
400 }
401
  // Atomically swap the _type-sized value at |dst| with |new_value|; the
  // previous memory value ends up in |output|. Loops on the
  // load-reserve/store-conditional pair until the store-conditional
  // succeeds (bne on cr0 retries when the reservation was lost).
  // lwsync()/sync() bracket the sequence as memory barriers.
  template <class _type>
  void AtomicExchange(MemOperand dst, Register new_value, Register output) {
    Label exchange;
    lwsync();
    bind(&exchange);
    LoadReserve<_type>(output, dst);
    StoreConditional<_type>(new_value, dst);
    bne(&exchange, cr0);  // store-conditional failed -> retry
    sync();
  }
412
  // Atomic read-modify-write on the _type-sized value at |dst|: repeatedly
  // load-reserves the memory value into |output|, lets |op| emit code
  // computing |result| from (|output|, |value|), and store-conditionals
  // |result| back, retrying until the reservation survives. After success,
  // |output| holds the value that was in memory before the update.
  // lwsync()/sync() bracket the sequence as memory barriers.
  template <class _type, class bin_op>
  void AtomicOps(MemOperand dst, Register value, Register output,
                 Register result, bin_op op) {
    Label binop;
    lwsync();
    bind(&binop);
    // Load-and-reserve of the operand width.
    switch (sizeof(_type)) {
      case 1:
        lbarx(output, dst);
        break;
      case 2:
        lharx(output, dst);
        break;
      case 4:
        lwarx(output, dst);
        break;
      case 8:
        ldarx(output, dst);
        break;
      default:
        UNREACHABLE();
    }
    // |op| is a callable emitting the combining code: op(result, output, value).
    op(result, output, value);
    // Matching store-conditional of the operand width.
    switch (sizeof(_type)) {
      case 1:
        stbcx(result, dst);
        break;
      case 2:
        sthcx(result, dst);
        break;
      case 4:
        stwcx(result, dst);
        break;
      case 8:
        stdcx(result, dst);
        break;
      default:
        UNREACHABLE();
    }
    bne(&binop, cr0);  // store-conditional failed -> retry
    sync();
  }
455
456 void Push(Register src) { push(src); }
457 // Push a handle.
459 void Push(Tagged<Smi> smi);
460
  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2) {
    // Pre-decrement sp by two slots while storing src2 at the new sp,
    // then fill the remaining slot above it.
    StoreU64WithUpdate(src2, MemOperand(sp, -2 * kSystemPointerSize));
    StoreU64(src1, MemOperand(sp, kSystemPointerSize));
  }
466
  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3) {
    // Pre-decrement sp by three slots, then fill the slots above.
    StoreU64WithUpdate(src3, MemOperand(sp, -3 * kSystemPointerSize));
    StoreU64(src2, MemOperand(sp, kSystemPointerSize));
    StoreU64(src1, MemOperand(sp, 2 * kSystemPointerSize));
  }
473
  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Register src4) {
    // Pre-decrement sp by four slots, then fill the slots above.
    StoreU64WithUpdate(src4, MemOperand(sp, -4 * kSystemPointerSize));
    StoreU64(src3, MemOperand(sp, kSystemPointerSize));
    StoreU64(src2, MemOperand(sp, 2 * kSystemPointerSize));
    StoreU64(src1, MemOperand(sp, 3 * kSystemPointerSize));
  }
481
  // Push five registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Register src4,
            Register src5) {
    // Pre-decrement sp by five slots, then fill the slots above.
    StoreU64WithUpdate(src5, MemOperand(sp, -5 * kSystemPointerSize));
    StoreU64(src4, MemOperand(sp, kSystemPointerSize));
    StoreU64(src3, MemOperand(sp, 2 * kSystemPointerSize));
    StoreU64(src2, MemOperand(sp, 3 * kSystemPointerSize));
    StoreU64(src1, MemOperand(sp, 4 * kSystemPointerSize));
  }
491
492 enum PushArrayOrder { kNormal, kReverse };
493 void PushArray(Register array, Register size, Register scratch,
494 Register scratch2, PushArrayOrder order = kNormal);
495
496 void Pop(Register dst) { pop(dst); }
497
  // Pop two registers. Pops rightmost register first (from lower address).
  // (Despite the src* parameter names, these registers RECEIVE the values.)
  void Pop(Register src1, Register src2) {
    LoadU64(src2, MemOperand(sp, 0));
    LoadU64(src1, MemOperand(sp, kSystemPointerSize));
    // Single sp adjustment after all loads.
    addi(sp, sp, Operand(2 * kSystemPointerSize));
  }
504
  // Pop three registers. Pops rightmost register first (from lower address).
  // (Despite the src* parameter names, these registers RECEIVE the values.)
  void Pop(Register src1, Register src2, Register src3) {
    LoadU64(src3, MemOperand(sp, 0));
    LoadU64(src2, MemOperand(sp, kSystemPointerSize));
    LoadU64(src1, MemOperand(sp, 2 * kSystemPointerSize));
    // Single sp adjustment after all loads.
    addi(sp, sp, Operand(3 * kSystemPointerSize));
  }
512
  // Pop four registers. Pops rightmost register first (from lower address).
  // (Despite the src* parameter names, these registers RECEIVE the values.)
  void Pop(Register src1, Register src2, Register src3, Register src4) {
    LoadU64(src4, MemOperand(sp, 0));
    LoadU64(src3, MemOperand(sp, kSystemPointerSize));
    LoadU64(src2, MemOperand(sp, 2 * kSystemPointerSize));
    LoadU64(src1, MemOperand(sp, 3 * kSystemPointerSize));
    // Single sp adjustment after all loads.
    addi(sp, sp, Operand(4 * kSystemPointerSize));
  }
521
  // Pop five registers. Pops rightmost register first (from lower address).
  // (Despite the src* parameter names, these registers RECEIVE the values.)
  void Pop(Register src1, Register src2, Register src3, Register src4,
           Register src5) {
    LoadU64(src5, MemOperand(sp, 0));
    LoadU64(src4, MemOperand(sp, kSystemPointerSize));
    LoadU64(src3, MemOperand(sp, 2 * kSystemPointerSize));
    LoadU64(src2, MemOperand(sp, 3 * kSystemPointerSize));
    LoadU64(src1, MemOperand(sp, 4 * kSystemPointerSize));
    // Single sp adjustment after all loads.
    addi(sp, sp, Operand(5 * kSystemPointerSize));
  }
532
535
536 void CallEphemeronKeyBarrier(Register object, Register slot_address,
537 SaveFPRegsMode fp_mode);
538
540 SaveFPRegsMode fp_mode,
542
544 Register object, Register slot_address, SaveFPRegsMode fp_mode,
545 StubCallMode mode = StubCallMode::kCallBuiltinPointer);
547 Register object, Register slot_address, SaveFPRegsMode fp_mode,
548 StubCallMode mode = StubCallMode::kCallBuiltinPointer);
549
550 void MultiPush(RegList regs, Register location = sp);
551 void MultiPop(RegList regs, Register location = sp);
552
553 void MultiPushDoubles(DoubleRegList dregs, Register location = sp);
554 void MultiPopDoubles(DoubleRegList dregs, Register location = sp);
555
557 Register location = sp);
559 Register location = sp);
560
562 Register scratch1, Register scratch2,
563 Register location = sp);
565 Register scratch1, Register scratch2,
566 Register location = sp);
567
568 // Calculate how much stack space (in bytes) are required to store caller
569 // registers excluding those specified in the arguments.
571 Register exclusion1 = no_reg,
572 Register exclusion2 = no_reg,
573 Register exclusion3 = no_reg) const;
574
575 // Push caller saved registers on the stack, and return the number of bytes
576 // stack pointer is adjusted.
578 Register scratch2, Register exclusion1 = no_reg,
579 Register exclusion2 = no_reg,
580 Register exclusion3 = no_reg);
581 // Restore caller saved registers from the stack, and return the number of
582 // bytes stack pointer is adjusted.
584 Register scratch2, Register exclusion1 = no_reg,
585 Register exclusion2 = no_reg,
586 Register exclusion3 = no_reg);
587
588 // Load an object from the root table.
590 LoadRoot(destination, index, al);
591 }
594
595 void SwapP(Register src, Register dst, Register scratch);
596 void SwapP(Register src, MemOperand dst, Register scratch);
597 void SwapP(MemOperand src, MemOperand dst, Register scratch_0,
598 Register scratch_1);
600 DoubleRegister scratch);
603 DoubleRegister scratch_1);
605 DoubleRegister scratch);
608 DoubleRegister scratch_1);
610 Simd128Register scratch);
612 Simd128Register scratch1, Register scratch2);
614 Simd128Register scratch2, Register scratch3);
615
616 void ByteReverseU16(Register dst, Register val, Register scratch);
617 void ByteReverseU32(Register dst, Register val, Register scratch);
619
620 // Before calling a C-function from generated code, align arguments on stack.
621 // After aligning the frame, non-register arguments must be stored in
622 // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
623 // are word sized. If double arguments are used, this function assumes that
624 // all double arguments are stored before core registers; otherwise the
625 // correct alignment of the double values is not guaranteed.
626 // Some compilers/platforms require the stack to be aligned when calling
627 // C++ code.
628 // Needs a scratch register to do some arithmetic. This register will be
629 // trashed.
630 void PrepareCallCFunction(int num_reg_arguments, int num_double_registers,
631 Register scratch);
632 void PrepareCallCFunction(int num_reg_arguments, Register scratch);
633
634 // There are two ways of passing double arguments on ARM, depending on
635 // whether soft or hard floating point ABI is used. These functions
636 // abstract parameter passing for the three different ways we call
637 // C functions from generated code.
641
642 // Calls a C function and cleans up the space for arguments allocated
643 // by PrepareCallCFunction. The called function is not allowed to trigger a
644 // garbage collection, since that might move the code and invalidate the
645 // return address (unless this is somehow accounted for by the called
646 // function).
648 ExternalReference function, int num_arguments,
649 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
650 bool has_function_descriptor = true);
652 Register function, int num_arguments,
653 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
654 bool has_function_descriptor = true);
656 ExternalReference function, int num_reg_arguments,
657 int num_double_arguments,
658 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
659 bool has_function_descriptor = true);
661 Register function, int num_reg_arguments, int num_double_arguments,
662 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
663 bool has_function_descriptor = true);
664
667
668 void Trap();
670
671 // Calls Abort(msg) if the condition cond is not satisfied.
672 // Use --debug_code to enable.
673 void Assert(Condition cond, AbortReason reason,
675
676 // Like Assert(), but always enabled.
677 void Check(Condition cond, AbortReason reason, CRegister cr = cr0);
678
679 // Print a message to stdout and abort execution.
680 void Abort(AbortReason reason);
681
682 void LoadFromConstantsTable(Register destination, int constant_index) final;
685 void StoreRootRelative(int32_t offset, Register value) final;
686
687 // Operand pointing to an external reference.
688 // May emit code to set up the scratch register. The operand is
689 // only guaranteed to be correct as long as the scratch register
690 // isn't changed.
691 // If the operand is used more than once, use a scratch register
692 // that is guaranteed not to be clobbered.
694 Register scratch);
696 return ExternalReferenceAsOperand(ExternalReference::Create(id), no_reg);
697 }
698
699 // Jump, Call, and Ret pseudo instructions implementing inter-working.
700 void Jump(Register target);
701 void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al,
702 CRegister cr = cr0);
703 void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al,
704 CRegister cr = cr0);
705 void Jump(const ExternalReference& reference);
706 void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al,
707 CRegister cr = cr0);
708 void Call(Register target);
709 void Call(Address target, RelocInfo::Mode rmode, Condition cond = al);
710 void Call(Handle<Code> code, RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
711 Condition cond = al);
712 void Call(Label* target);
713
714 void GetLabelAddress(Register dst, Label* target);
715
716 // Load the builtin given by the Smi in |builtin_index| into |target|.
717 void LoadEntryFromBuiltinIndex(Register builtin_index, Register target);
720
721#ifdef V8_ENABLE_LEAPTIERING
722 void LoadEntrypointFromJSDispatchTable(Register destination,
723 Register dispatch_handle,
724 Register scratch);
725#endif // V8_ENABLE_LEAPTIERING
726
727 // Load the code entry point from the Code object.
729 Register destination, Register code_object,
730 CodeEntrypointTag tag = kDefaultCodeEntrypointTag);
731 void CallCodeObject(Register code_object);
732 void JumpCodeObject(Register code_object,
733 JumpMode jump_mode = JumpMode::kJump);
734
735 void CallBuiltinByIndex(Register builtin_index, Register target);
736
737 // TODO(olivf, 42204201) Rename this to AssertNotDeoptimized once
738 // non-leaptiering is removed from the codebase.
740 void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
742 Label* jump_deoptimization_entry_label);
743
744 // Emit code to discard a non-negative number of pointer-sized elements
745 // from the stack, clobbering only the sp register.
746 void Drop(int count);
747 void Drop(Register count, Register scratch = r0);
748
  // Return to the caller (branch to the link register).
  void Ret() { blr(); }
  // Conditional return on the given condition/CR field.
  void Ret(Condition cond, CRegister cr = cr0) { bclr(cond, cr); }
  // Drop |drop| pointer-sized stack slots, then return.
  void Ret(int drop) {
    Drop(drop);
    blr();
  }
755
  // If the value is a NaN, canonicalize the value else, do nothing.
  // In-place variant: forwards to the two-register overload with
  // dst == src.
  void CanonicalizeNaN(const DoubleRegister value) {
    CanonicalizeNaN(value, value);
  }
761 void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
762 Label* condition_met);
763
764 // Move values between integer and floating point registers.
767 Register scratch);
769 Register src);
771 Register src_lo, Register scratch);
777 Register dst, DoubleRegister src);
780 // Register move. May do nothing if the registers are identical.
781 void Move(Register dst, Tagged<Smi> smi) { LoadSmiLiteral(dst, smi); }
783 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
784 void Move(Register dst, ExternalReference reference);
786 void Move(Register dst, Register src, Condition cond = al);
  // Load a 64-bit value from memory into |dst|.
  void Move(Register dst, const MemOperand& src) {
    // TODO(johnyan): Use scratch register scope instead of r0.
    LoadU64(dst, src, r0);
  }
792
793 void SmiUntag(Register dst, const MemOperand& src, RCBit rc = LeaveRC,
794 Register scratch = no_reg);
795 void SmiUntag(Register reg, RCBit rc = LeaveRC) { SmiUntag(reg, reg, rc); }
796
797 void SmiUntag(Register dst, Register src, RCBit rc = LeaveRC) {
799 srawi(dst, src, kSmiShift, rc);
800 } else {
801 ShiftRightS64(dst, src, Operand(kSmiShift), rc);
802 }
803 }
805 if (v8_flags.enable_slow_asserts) {
806 AssertSmi(smi);
807 }
809 SmiUntag(smi);
810 }
811
  // Shift left by kSmiShift
  // In-place tag: forwards to the two-register overload with dst == src.
  void SmiTag(Register reg, RCBit rc = LeaveRC) { SmiTag(reg, reg, rc); }
  // Tag |src| as a Smi into |dst| by shifting left kSmiShift bits.
  void SmiTag(Register dst, Register src, RCBit rc = LeaveRC) {
    ShiftLeftU64(dst, src, Operand(kSmiShift), rc);
  }
817
818 // Abort execution if argument is a smi, enabled via --debug-code.
821
825
826 // ---------------------------------------------------------------------------
827 // Bit testing/extraction
828 //
829 // Bit numbering is such that the least significant bit is bit 0
830 // (for consistency between 32/64-bit).
831
  // Extract consecutive bits (defined by rangeStart - rangeEnd) from src
  // and, if !test, shift them into the least significant bits of dst.
  // Bit numbering: bit 0 is the least significant bit; rangeStart is the
  // high end of the range and must be >= rangeEnd.
  inline void ExtractBitRange(Register dst, Register src, int rangeStart,
                              int rangeEnd, RCBit rc = LeaveRC,
                              bool test = false) {
    DCHECK(rangeStart >= rangeEnd && rangeStart < kBitsPerSystemPointer);
    // Rotate amount that brings bit rangeEnd down to bit 0 (no rotate
    // needed when the range already ends at bit 0).
    int rotate = (rangeEnd == 0) ? 0 : kBitsPerSystemPointer - rangeEnd;
    int width = rangeStart - rangeEnd + 1;
    if (rc == SetRC && rangeStart < 16 && (rangeEnd == 0 || test)) {
      // Prefer faster andi when applicable.
      andi(dst, src, Operand(((1 << width) - 1) << rangeEnd));
    } else {
      // Rotate-left + clear-left: keeps only the low |width| bits of the
      // rotated value.
      rldicl(dst, src, rotate, kBitsPerSystemPointer - width, rc);
    }
  }
847
  // Extract a single bit (degenerate one-bit range) from src into dst.
  inline void ExtractBit(Register dst, Register src, uint32_t bitNumber,
                         RCBit rc = LeaveRC, bool test = false) {
    ExtractBitRange(dst, src, bitNumber, bitNumber, rc, test);
  }
852
853 // Extract consecutive bits (defined by mask) from src and place them
854 // into the least significant bits of dst.
855 inline void ExtractBitMask(Register dst, Register src, uintptr_t mask,
856 RCBit rc = LeaveRC, bool test = false) {
858 int end;
859 uintptr_t bit = (1L << start);
860
861 while (bit && (mask & bit) == 0) {
862 start--;
863 bit >>= 1;
864 }
865 end = start;
866 bit >>= 1;
867
868 while (bit && (mask & bit)) {
869 end--;
870 bit >>= 1;
871 }
872
873 // 1-bits in mask must be contiguous
874 DCHECK(bit == 0 || (mask & ((bit << 1) - 1)) == 0);
875
876 ExtractBitRange(dst, src, start, end, rc, test);
877 }
878
  // Test single bit in value.
  // Sets condition codes (SetRC) via a one-bit extraction into |scratch|.
  inline void TestBit(Register value, int bitNumber, Register scratch = r0) {
    ExtractBitRange(scratch, value, bitNumber, bitNumber, SetRC, true);
  }
883
  // Test consecutive bit range in value. Range is defined by mask.
  // Sets condition codes (SetRC); |scratch| receives the extracted bits.
  inline void TestBitMask(Register value, uintptr_t mask,
                          Register scratch = r0) {
    ExtractBitMask(scratch, value, mask, SetRC, true);
  }
  // Test consecutive bit range in value. Range is defined by
  // rangeStart - rangeEnd.
  // Sets condition codes (SetRC); |scratch| receives the extracted bits.
  inline void TestBitRange(Register value, int rangeStart, int rangeEnd,
                           Register scratch = r0) {
    ExtractBitRange(scratch, value, rangeStart, rangeEnd, SetRC, true);
  }
895
  // Test the Smi tag bits (bits [kSmiTagSize-1 : 0]) of |value|, setting
  // condition codes; EQ afterwards indicates a Smi.
  inline void TestIfSmi(Register value, Register scratch) {
    TestBitRange(value, kSmiTagSize - 1, 0, scratch);
  }
  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label) {
    TestIfSmi(value, r0);
    beq(smi_label, cr0);  // branch if SMI
  }
904 void JumpIfEqual(Register x, int32_t y, Label* dest);
905 void JumpIfLessThan(Register x, int32_t y, Label* dest);
906
908 void LoadCompressedMap(Register dst, Register object, Register scratch);
909
910 void LoadFeedbackVector(Register dst, Register closure, Register scratch,
911 Label* fbv_undef);
912
  inline void TestIfInt32(Register value, Register scratch,
                          CRegister cr = cr0) {
    // High bits must be identical to fit into a 32-bit integer: compare
    // the value against its own 32->64-bit sign extension; EQ afterwards
    // means the value fits in 32 bits.
    extsw(scratch, value);
    CmpS64(scratch, value, cr);
  }
919
920 // Overflow handling functions.
921 // Usage: call the appropriate arithmetic function and then call one of the
922 // flow control functions with the corresponding label.
923
924 // Compute dst = left + right, setting condition codes. dst may be same as
925 // either left or right (or a unique register). left and right must not be
926 // the same register.
928 Register overflow_dst, Register scratch = r0);
929 void AddAndCheckForOverflow(Register dst, Register left, intptr_t right,
930 Register overflow_dst, Register scratch = r0);
931
932 // Compute dst = left - right, setting condition codes. dst may be same as
933 // either left or right (or a unique register). left and right must not be
934 // the same register.
936 Register overflow_dst, Register scratch = r0);
937
938 // Performs a truncating conversion of a floating point number as used by
939 // the JS bitwise operations. See ECMA-262 9.5: ToInt32. Goes to 'done' if it
940 // succeeds, otherwise falls through if result is saturated. On return
941 // 'result' either holds answer, or is clobbered on fall through.
943 Label* done);
945 DoubleRegister double_input, StubCallMode stub_mode);
946
948
949 // Loads the constant pool pointer (kConstantPoolRegister).
951 Register code_target_address, Register scratch1, Register scratch2);
953#ifdef DEBUG
954 // Avoid DCHECK(!is_linked()) failure in ~Label()
955 bind(ConstantPoolPosition());
956#endif
957 }
958
959 // Convenience functions to call/jmp to the code of a JSFunction object.
960 void CallJSFunction(Register function_object, uint16_t argument_count,
961 Register scratch);
962 void JumpJSFunction(Register function_object, Register scratch,
963 JumpMode jump_mode = JumpMode::kJump);
964#ifdef V8_ENABLE_LEAPTIERING
965 void CallJSDispatchEntry(JSDispatchHandle dispatch_handle,
966 uint16_t argument_count);
967#endif
968#ifdef V8_ENABLE_WEBASSEMBLY
969 void ResolveWasmCodePointer(Register target);
970 void CallWasmCodePointer(Register target,
971 CallJumpMode call_jump_mode = CallJumpMode::kCall);
972 void LoadWasmCodePointer(Register dst, MemOperand src);
973#endif
974
975 // Generates an instruction sequence s.t. the return address points to the
976 // instruction following the call.
977 // The return address on the stack is used by frame iteration.
979
980 // Enforce platform specific stack alignment.
982
983 // Control-flow integrity:
984
985 // Define a function entrypoint. This doesn't emit any code for this
986 // architecture, as control-flow integrity is not supported for it.
987 void CodeEntry() {}
988 // Define an exception handler.
990 // Define an exception handler and bind a label.
992
993 // ---------------------------------------------------------------------------
994 // V8 Sandbox support
995
996 // Transform a SandboxedPointer from/to its encoded form, which is used when
997 // the pointer is stored on the heap and ensures that the pointer will always
998 // point into the sandbox.
1001 const MemOperand& field_operand,
1002 Register scratch = no_reg);
1004 const MemOperand& dst_field_operand,
1005 Register scratch = no_reg);
1006
1007 // Loads a field containing off-heap pointer and does necessary decoding
1008 // if sandboxed external pointers are enabled.
1011 Register isolate_root = no_reg,
1012 Register scratch = no_reg);
1013
1014 // Load a trusted pointer field.
1015 // When the sandbox is enabled, these are indirect pointers using the trusted
1016 // pointer table. Otherwise they are regular tagged fields.
1019 Register scratch = no_reg);
1020
1021 // Store a trusted pointer field.
1022 // When the sandbox is enabled, these are indirect pointers using the trusted
1023 // pointer table. Otherwise they are regular tagged fields.
1024 void StoreTrustedPointerField(Register value, MemOperand dst_field_operand,
1025 Register scratch = no_reg);
1026
1027 // Load a code pointer field.
1028 // These are special versions of trusted pointers that, when the sandbox is
1029 // enabled, reference code objects through the code pointer table.
1031 Register scratch) {
1032 LoadTrustedPointerField(destination, field_operand, kCodeIndirectPointerTag,
1033 scratch);
1034 }
1035 // Store a code pointer field.
1036 void StoreCodePointerField(Register value, MemOperand dst_field_operand,
1037 Register scratch = no_reg) {
1038 StoreTrustedPointerField(value, dst_field_operand, scratch);
1039 }
1040
1041 // Load an indirect pointer field.
1042 // Only available when the sandbox is enabled.
1044 IndirectPointerTag tag, Register scratch);
1045
1046 // Store an indirect pointer field.
1047 // Only available when the sandbox is enabled.
1048 void StoreIndirectPointerField(Register value, MemOperand dst_field_operand,
1049 Register scratch);
1050
1051#ifdef V8_ENABLE_SANDBOX
1052 // Retrieve the heap object referenced by the given indirect pointer handle,
1053 // which can either be a trusted pointer handle or a code pointer handle.
1054 void ResolveIndirectPointerHandle(Register destination, Register handle,
1056 Register scratch = no_reg);
1057
1058 // Retrieve the heap object referenced by the given trusted pointer handle.
1059 void ResolveTrustedPointerHandle(Register destination, Register handle,
1061 Register scratch = no_reg);
1062
1063 // Retrieve the Code object referenced by the given code pointer handle.
1064 void ResolveCodePointerHandle(Register destination, Register handle,
1065 Register scratch = no_reg);
1066
1067 // Load the pointer to a Code's entrypoint via a code pointer.
1068 // Only available when the sandbox is enabled as it requires the code pointer
1069 // table.
1070 void LoadCodeEntrypointViaCodePointer(Register destination,
1071 MemOperand field_operand,
1072 Register scratch = no_reg);
1073
1074 // Load the value of Code pointer table corresponding to
1075 // IsolateGroup::current()->code_pointer_table_.
1076 // Only available when the sandbox is enabled.
1077 void LoadCodePointerTableBase(Register destination);
1078#endif
1079
1080 // ---------------------------------------------------------------------------
1081 // Pointer compression Support
1082
1084#if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1085 static_assert(kSmiTag == 0 && kSmiShift < kSystemPointerSizeLog2);
1086 ShiftLeftU64(dst, src, Operand(kSystemPointerSizeLog2 - kSmiShift));
1087#else
1088 static_assert(kSmiTag == 0 && kSmiShift > kSystemPointerSizeLog2);
1089 ShiftRightS64(dst, src, Operand(kSmiShift - kSystemPointerSizeLog2));
1090#endif
1091 }
1092
1093 // Loads a field containing any tagged value and decompresses it if necessary.
1095 const MemOperand& field_operand,
1096 const Register& scratch = no_reg);
1098 Register scratch);
1099
1100 // Compresses and stores tagged value to given on-heap location.
1101 void StoreTaggedField(const Register& value,
1102 const MemOperand& dst_field_operand,
1103 const Register& scratch = no_reg);
1104
1105 void Zero(const MemOperand& dest);
1106 void Zero(const MemOperand& dest1, const MemOperand& dest2);
1107
1113
1114 void LoadF64(DoubleRegister dst, const MemOperand& mem,
1115 Register scratch = no_reg);
1116 void LoadF32(DoubleRegister dst, const MemOperand& mem,
1117 Register scratch = no_reg);
1118
1119 void StoreF32(DoubleRegister src, const MemOperand& mem,
1120 Register scratch = no_reg);
1121 void StoreF64(DoubleRegister src, const MemOperand& mem,
1122 Register scratch = no_reg);
1123
1125 Register scratch = no_reg);
1127 Register scratch = no_reg);
1128
1130 Register scratch = no_reg);
1132 Register scratch = no_reg);
1133
1134 void LoadU64(Register dst, const MemOperand& mem, Register scratch = no_reg);
1135 void LoadU32(Register dst, const MemOperand& mem, Register scratch = no_reg);
1136 void LoadS32(Register dst, const MemOperand& mem, Register scratch = no_reg);
1137 void LoadU16(Register dst, const MemOperand& mem, Register scratch = no_reg);
1138 void LoadS16(Register dst, const MemOperand& mem, Register scratch = no_reg);
1139 void LoadU8(Register dst, const MemOperand& mem, Register scratch = no_reg);
1140 void LoadS8(Register dst, const MemOperand& mem, Register scratch = no_reg);
1141
1142 void StoreU64(Register src, const MemOperand& mem, Register scratch = no_reg);
1143 void StoreU32(Register src, const MemOperand& mem, Register scratch);
1144 void StoreU16(Register src, const MemOperand& mem, Register scratch);
1145 void StoreU8(Register src, const MemOperand& mem, Register scratch);
1146
1148 Register scratch = no_reg);
1150 Register scratch = no_reg);
1151
1152 void LoadU64LE(Register dst, const MemOperand& mem, Register scratch);
1153 void LoadU32LE(Register dst, const MemOperand& mem, Register scratch);
1154 void LoadU16LE(Register dst, const MemOperand& mem, Register scratch);
1155 void StoreU64LE(Register src, const MemOperand& mem, Register scratch);
1156 void StoreU32LE(Register src, const MemOperand& mem, Register scratch);
1157 void StoreU16LE(Register src, const MemOperand& mem, Register scratch);
1158
1159 void LoadS32LE(Register dst, const MemOperand& mem, Register scratch);
1160 void LoadS16LE(Register dst, const MemOperand& mem, Register scratch);
1161
1162 void LoadF64LE(DoubleRegister dst, const MemOperand& mem, Register scratch,
1163 Register scratch2);
1164 void LoadF32LE(DoubleRegister dst, const MemOperand& mem, Register scratch,
1165 Register scratch2);
1166
1167 void StoreF32LE(DoubleRegister src, const MemOperand& mem, Register scratch,
1168 Register scratch2);
1169 void StoreF64LE(DoubleRegister src, const MemOperand& mem, Register scratch,
1170 Register scratch2);
1171
1172 // Simd Support.
1173#define SIMD_BINOP_LIST(V) \
1174 V(F64x2Add) \
1175 V(F64x2Sub) \
1176 V(F64x2Mul) \
1177 V(F64x2Div) \
1178 V(F64x2Eq) \
1179 V(F64x2Lt) \
1180 V(F64x2Le) \
1181 V(F32x4Add) \
1182 V(F32x4Sub) \
1183 V(F32x4Mul) \
1184 V(F32x4Div) \
1185 V(F32x4Min) \
1186 V(F32x4Max) \
1187 V(F32x4Eq) \
1188 V(F32x4Lt) \
1189 V(F32x4Le) \
1190 V(I64x2Add) \
1191 V(I64x2Sub) \
1192 V(I64x2Eq) \
1193 V(I64x2GtS) \
1194 V(I32x4MinS) \
1195 V(I32x4MinU) \
1196 V(I32x4MaxS) \
1197 V(I32x4MaxU) \
1198 V(I32x4Add) \
1199 V(I32x4Sub) \
1200 V(I32x4Mul) \
1201 V(I32x4Eq) \
1202 V(I32x4GtS) \
1203 V(I32x4GtU) \
1204 V(I32x4DotI16x8S) \
1205 V(I16x8Add) \
1206 V(I16x8Sub) \
1207 V(I16x8Mul) \
1208 V(I16x8MinS) \
1209 V(I16x8MinU) \
1210 V(I16x8MaxS) \
1211 V(I16x8MaxU) \
1212 V(I16x8Eq) \
1213 V(I16x8GtS) \
1214 V(I16x8GtU) \
1215 V(I16x8AddSatS) \
1216 V(I16x8SubSatS) \
1217 V(I16x8AddSatU) \
1218 V(I16x8SubSatU) \
1219 V(I16x8SConvertI32x4) \
1220 V(I16x8UConvertI32x4) \
1221 V(I16x8RoundingAverageU) \
1222 V(I16x8Q15MulRSatS) \
1223 V(I8x16Add) \
1224 V(I8x16Sub) \
1225 V(I8x16MinS) \
1226 V(I8x16MinU) \
1227 V(I8x16MaxS) \
1228 V(I8x16MaxU) \
1229 V(I8x16Eq) \
1230 V(I8x16GtS) \
1231 V(I8x16GtU) \
1232 V(I8x16AddSatS) \
1233 V(I8x16SubSatS) \
1234 V(I8x16AddSatU) \
1235 V(I8x16SubSatU) \
1236 V(I8x16SConvertI16x8) \
1237 V(I8x16UConvertI16x8) \
1238 V(I8x16RoundingAverageU) \
1239 V(S128And) \
1240 V(S128Or) \
1241 V(S128Xor) \
1242 V(S128AndNot)
1243
1244#define PROTOTYPE_SIMD_BINOP(name) \
1245 void name(Simd128Register dst, Simd128Register src1, Simd128Register src2);
1247#undef PROTOTYPE_SIMD_BINOP
1248#undef SIMD_BINOP_LIST
1249
1250#define SIMD_BINOP_WITH_SCRATCH_LIST(V) \
1251 V(F64x2Ne) \
1252 V(F64x2Pmin) \
1253 V(F64x2Pmax) \
1254 V(F32x4Ne) \
1255 V(F32x4Pmin) \
1256 V(F32x4Pmax) \
1257 V(I64x2Ne) \
1258 V(I64x2GeS) \
1259 V(I64x2ExtMulLowI32x4S) \
1260 V(I64x2ExtMulHighI32x4S) \
1261 V(I64x2ExtMulLowI32x4U) \
1262 V(I64x2ExtMulHighI32x4U) \
1263 V(I32x4Ne) \
1264 V(I32x4GeS) \
1265 V(I32x4GeU) \
1266 V(I32x4ExtMulLowI16x8S) \
1267 V(I32x4ExtMulHighI16x8S) \
1268 V(I32x4ExtMulLowI16x8U) \
1269 V(I32x4ExtMulHighI16x8U) \
1270 V(I16x8Ne) \
1271 V(I16x8GeS) \
1272 V(I16x8GeU) \
1273 V(I16x8ExtMulLowI8x16S) \
1274 V(I16x8ExtMulHighI8x16S) \
1275 V(I16x8ExtMulLowI8x16U) \
1276 V(I16x8ExtMulHighI8x16U) \
1277 V(I16x8DotI8x16S) \
1278 V(I8x16Ne) \
1279 V(I8x16GeS) \
1280 V(I8x16GeU) \
1281 V(I8x16Swizzle)
1282
1283#define PROTOTYPE_SIMD_BINOP_WITH_SCRATCH(name) \
1284 void name(Simd128Register dst, Simd128Register src1, Simd128Register src2, \
1285 Simd128Register scratch);
1287#undef PROTOTYPE_SIMD_BINOP_WITH_SCRATCH
1288#undef SIMD_BINOP_WITH_SCRATCH_LIST
1289
1290#define SIMD_SHIFT_LIST(V) \
1291 V(I64x2Shl) \
1292 V(I64x2ShrS) \
1293 V(I64x2ShrU) \
1294 V(I32x4Shl) \
1295 V(I32x4ShrS) \
1296 V(I32x4ShrU) \
1297 V(I16x8Shl) \
1298 V(I16x8ShrS) \
1299 V(I16x8ShrU) \
1300 V(I8x16Shl) \
1301 V(I8x16ShrS) \
1302 V(I8x16ShrU)
1303
1304#define PROTOTYPE_SIMD_SHIFT(name) \
1305 void name(Simd128Register dst, Simd128Register src1, Register src2, \
1306 Simd128Register scratch); \
1307 void name(Simd128Register dst, Simd128Register src1, const Operand& src2, \
1308 Register scratch1, Simd128Register scratch2);
1310#undef PROTOTYPE_SIMD_SHIFT
1311#undef SIMD_SHIFT_LIST
1312
1313#define SIMD_BITMASK_LIST(V) \
1314 V(I64x2BitMask) \
1315 V(I32x4BitMask) \
1316 V(I16x8BitMask)
1317
1318#define PROTOTYPE_SIMD_BITMASK(name) \
1319 void name(Register dst, Simd128Register src, Register scratch1, \
1320 Simd128Register scratch2);
1322#undef PROTOTYPE_SIMD_BITMASK
1323#undef SIMD_BITMASK_LIST
1324
1325#define SIMD_UNOP_LIST(V) \
1326 V(F64x2Abs) \
1327 V(F64x2Neg) \
1328 V(F64x2Sqrt) \
1329 V(F64x2Ceil) \
1330 V(F64x2Floor) \
1331 V(F64x2Trunc) \
1332 V(F64x2PromoteLowF32x4) \
1333 V(F32x4Abs) \
1334 V(F32x4Neg) \
1335 V(F32x4Sqrt) \
1336 V(F32x4Ceil) \
1337 V(F32x4Floor) \
1338 V(F32x4Trunc) \
1339 V(F32x4SConvertI32x4) \
1340 V(F32x4UConvertI32x4) \
1341 V(I64x2Neg) \
1342 V(F64x2ConvertLowI32x4S) \
1343 V(I64x2SConvertI32x4Low) \
1344 V(I64x2SConvertI32x4High) \
1345 V(I32x4Neg) \
1346 V(I32x4SConvertI16x8Low) \
1347 V(I32x4SConvertI16x8High) \
1348 V(I32x4UConvertF32x4) \
1349 V(I16x8SConvertI8x16Low) \
1350 V(I16x8SConvertI8x16High) \
1351 V(I8x16Popcnt) \
1352 V(S128Not)
1353
1354#define PROTOTYPE_SIMD_UNOP(name) \
1355 void name(Simd128Register dst, Simd128Register src);
1357#undef PROTOTYPE_SIMD_UNOP
1358#undef SIMD_UNOP_LIST
1359
1360#define SIMD_UNOP_WITH_SCRATCH_LIST(V) \
1361 V(F32x4DemoteF64x2Zero) \
1362 V(I64x2Abs) \
1363 V(I32x4Abs) \
1364 V(I32x4SConvertF32x4) \
1365 V(I32x4TruncSatF64x2SZero) \
1366 V(I32x4TruncSatF64x2UZero) \
1367 V(I16x8Abs) \
1368 V(I16x8Neg) \
1369 V(I8x16Abs) \
1370 V(I8x16Neg)
1371
1372#define PROTOTYPE_SIMD_UNOP_WITH_SCRATCH(name) \
1373 void name(Simd128Register dst, Simd128Register src, Simd128Register scratch);
1375#undef PROTOTYPE_SIMD_UNOP_WITH_SCRATCH
1376#undef SIMD_UNOP_WITH_SCRATCH_LIST
1377
1378#define SIMD_ALL_TRUE_LIST(V) \
1379 V(I64x2AllTrue) \
1380 V(I32x4AllTrue) \
1381 V(I16x8AllTrue) \
1382 V(I8x16AllTrue)
1383
1384#define PROTOTYPE_SIMD_ALL_TRUE(name) \
1385 void name(Register dst, Simd128Register src, Register scratch1, \
1386 Register scratch2, Simd128Register scratch3);
1388#undef PROTOTYPE_SIMD_ALL_TRUE
1389#undef SIMD_ALL_TRUE_LIST
1390
1391#define SIMD_QFM_LIST(V) \
1392 V(F64x2Qfma) \
1393 V(F64x2Qfms) \
1394 V(F32x4Qfma) \
1395 V(F32x4Qfms)
1396#define PROTOTYPE_SIMD_QFM(name) \
1397 void name(Simd128Register dst, Simd128Register src1, Simd128Register src2, \
1398 Simd128Register src3, Simd128Register scratch);
1400#undef PROTOTYPE_SIMD_QFM
1401#undef SIMD_QFM_LIST
1402
1403#define SIMD_EXT_ADD_PAIRWISE_LIST(V) \
1404 V(I32x4ExtAddPairwiseI16x8S) \
1405 V(I32x4ExtAddPairwiseI16x8U) \
1406 V(I16x8ExtAddPairwiseI8x16S) \
1407 V(I16x8ExtAddPairwiseI8x16U)
1408#define PROTOTYPE_SIMD_EXT_ADD_PAIRWISE(name) \
1409 void name(Simd128Register dst, Simd128Register src, \
1410 Simd128Register scratch1, Simd128Register scratch2);
1412#undef PROTOTYPE_SIMD_EXT_ADD_PAIRWISE
1413#undef SIMD_EXT_ADD_PAIRWISE_LIST
1414
1416 Register scratch);
1418 Register scratch);
1420 Register scratch);
1422 Register scratch1, Simd128Register scratch2);
1424 Register scratch);
1426 Register scratch);
1428 Register scratch);
1430 Register scratch);
1432 Register scratch);
1434 Register scratch);
1436 Register scratch);
1438 Register scratch);
1439 void LoadLane64LE(Simd128Register dst, const MemOperand& mem, int lane,
1440 Register scratch1, Simd128Register scratch2);
1441 void LoadLane32LE(Simd128Register dst, const MemOperand& mem, int lane,
1442 Register scratch1, Simd128Register scratch2);
1443 void LoadLane16LE(Simd128Register dst, const MemOperand& mem, int lane,
1444 Register scratch1, Simd128Register scratch2);
1445 void LoadLane8LE(Simd128Register dst, const MemOperand& mem, int lane,
1446 Register scratch1, Simd128Register scratch2);
1447 void StoreLane64LE(Simd128Register src, const MemOperand& mem, int lane,
1448 Register scratch1, Simd128Register scratch2);
1449 void StoreLane32LE(Simd128Register src, const MemOperand& mem, int lane,
1450 Register scratch1, Simd128Register scratch2);
1451 void StoreLane16LE(Simd128Register src, const MemOperand& mem, int lane,
1452 Register scratch1, Simd128Register scratch2);
1453 void StoreLane8LE(Simd128Register src, const MemOperand& mem, int lane,
1454 Register scratch1, Simd128Register scratch2);
1456 Register scratch);
1458 Register scratch);
1460 Register scratch);
1462 Register scratch);
1464 Register scratch);
1466 Register scratch1, Simd128Register scratch2);
1468 Register scratch);
1470 Register scratch1, Simd128Register scratch2);
1472 Register scratch);
1474 Register scratch1, Simd128Register scratch2);
1476 Register scratch1, Simd128Register scratch2);
1478 Register scratch1, Simd128Register scratch2);
1481 DoubleRegister scratch1, Register scratch2);
1487 uint8_t imm_lane_idx, Simd128Register scratch1,
1488 Register scratch2);
1490 uint8_t imm_lane_idx, Simd128Register scratch1,
1491 Register scratch2, Register scratch3);
1492 void I64x2ExtractLane(Register dst, Simd128Register src, uint8_t imm_lane_idx,
1493 Simd128Register scratch);
1494 void I32x4ExtractLane(Register dst, Simd128Register src, uint8_t imm_lane_idx,
1495 Simd128Register scratch);
1497 uint8_t imm_lane_idx, Simd128Register scratch);
1499 uint8_t imm_lane_idx, Simd128Register scratch);
1501 uint8_t imm_lane_idx, Simd128Register scratch);
1503 uint8_t imm_lane_idx, Simd128Register scratch);
1505 DoubleRegister src2, uint8_t imm_lane_idx,
1506 Register scratch1, Simd128Register scratch2);
1508 DoubleRegister src2, uint8_t imm_lane_idx,
1509 Register scratch1, DoubleRegister scratch2,
1510 Simd128Register scratch3);
1512 Register src2, uint8_t imm_lane_idx,
1513 Simd128Register scratch);
1515 Register src2, uint8_t imm_lane_idx,
1516 Simd128Register scratch);
1518 Register src2, uint8_t imm_lane_idx,
1519 Simd128Register scratch);
1521 Register src2, uint8_t imm_lane_idx,
1522 Simd128Register scratch);
1524 Register scratch1, Register scrahc2, Register scratch3,
1525 Simd128Register scratch4);
1527 Simd128Register scratch1, Simd128Register scratch2);
1529 Simd128Register scratch1, Simd128Register scratch2);
1531 Register scratch1, Simd128Register scratch2);
1533 Register scratch1, Simd128Register scratch2);
1535 Register scratch1, Simd128Register scratch2);
1537 Register scratch1, Simd128Register scratch2);
1539 Register scratch1, Simd128Register scratch2);
1541 Register scratch1, Simd128Register scratch2);
1543 Register scratch1, Simd128Register scratch2);
1545 Register scratch2, Simd128Register scratch3);
1547 Simd128Register src2, uint64_t high, uint64_t low,
1548 Register scratch1, Register scratch2,
1549 Simd128Register scratch3);
1551 Simd128Register src2, Simd128Register src3);
1553 Register scratch2, Simd128Register scratch3);
1554 void S128Const(Simd128Register dst, uint64_t high, uint64_t low,
1555 Register scratch1, Register scratch2);
1558
1559 // It assumes that the arguments are located below the stack pointer.
1560 void LoadReceiver(Register dest) { LoadU64(dest, MemOperand(sp, 0)); }
1561 void StoreReceiver(Register rec) { StoreU64(rec, MemOperand(sp, 0)); }
1562
1563 // ---------------------------------------------------------------------------
1564 // GC Support
1565
1566 // Notify the garbage collector that we wrote a pointer into an object.
1567 // |object| is the object being stored into, |value| is the object being
1568 // stored. value and scratch registers are clobbered by the operation.
1569 // The offset is the offset from the start of the object, not the offset from
1570 // the tagged HeapObject pointer. For use with FieldMemOperand(reg, off).
1572 Register object, int offset, Register value, Register slot_address,
1573 LinkRegisterStatus lr_status, SaveFPRegsMode save_fp,
1574 SmiCheck smi_check = SmiCheck::kInline,
1575 SlotDescriptor slot = SlotDescriptor::ForDirectPointerSlot());
1576
1577 // For a given |object| notify the garbage collector that the slot |address|
1578 // has been written. |value| is the object being stored. The value and
1579 // address registers are clobbered by the operation.
1581 Register object, Register slot_address, Register value,
1582 LinkRegisterStatus lr_status, SaveFPRegsMode save_fp,
1583 SmiCheck smi_check = SmiCheck::kInline,
1584 SlotDescriptor slot = SlotDescriptor::ForDirectPointerSlot());
1585
1586 // Enter exit frame.
1587 // stack_space - extra stack space, used for parameters before call to C.
1588 void EnterExitFrame(Register scratch, int stack_space,
1589 StackFrame::Type frame_type);
1590
1591 // Leave the current exit frame.
1593
1594 // Load the global proxy from the current context.
1596 LoadNativeContextSlot(dst, Context::GLOBAL_PROXY_INDEX);
1597 }
1598
1599 void LoadNativeContextSlot(Register dst, int index);
1600
1601 // ----------------------------------------------------------------
1602 // new PPC macro-assembler interfaces that are slightly higher level
1603 // than assembler-ppc and may generate variable length sequences
1604
1605 // load a literal double value <value> to FPR <result>
1606
1608 Register scratch);
1610 Register scratch);
1612 CRegister cr = cr0);
1614 CRegister cr = cr0);
1616 Register scratch, RCBit rc = LeaveRC);
1617
1618 // ---------------------------------------------------------------------------
1619 // JavaScript invokes
1620
1621 // Removes current frame and its arguments from the stack preserving
1622 // the arguments and a return address pushed to the stack for the next call.
  // Both |callee_args_count| and |caller_args_count| do not include
1624 // receiver. |callee_args_count| is not modified. |caller_args_count|
1625 // is trashed.
1626
1627 // Invoke the JavaScript function code by either calling or jumping.
1629 Register expected_parameter_count,
1630 Register actual_parameter_count, InvokeType type);
1631
1632 // On function call, call into the debugger if necessary.
1634 Register expected_parameter_count,
1635 Register actual_parameter_count);
1636
1637 // Invoke the JavaScript function in the given register. Changes the
1638 // current context to the context in the function before invoking.
1640 Register actual_parameter_count,
1641 InvokeType type);
1642 void InvokeFunction(Register function, Register expected_parameter_count,
1643 Register actual_parameter_count, InvokeType type);
1644
1645 // Exception handling
1646
1647 // Push a new stack handler and link into stack handler chain.
1649
1650 // Unlink the stack handler on top of the stack from the stack handler chain.
1651 // Must preserve the result register.
1653
1654 // ---------------------------------------------------------------------------
1655 // Support functions.
1656
1657 // Compare object type for heap object. heap_object contains a non-Smi
1658 // whose object type should be compared with the given type. This both
1659 // sets the flags and leaves the object type in the type_reg register.
1660 // It leaves the map in the map register (unless the type_reg and map register
1661 // are the same register). It leaves the heap object in the heap_object
1662 // register unless the heap_object register is the same register as one of the
1663 // other registers.
1664 // Type_reg can be no_reg. In that case ip is used.
1665 void CompareObjectType(Register heap_object, Register map, Register type_reg,
1666 InstanceType type);
1667 // Variant of the above, which compares against a type range rather than a
1668 // single type (lower_limit and higher_limit are inclusive).
1669 //
1670 // Always use unsigned comparisons: ls for a positive result.
1672 Register type_reg, Register scratch,
1673 InstanceType lower_limit,
1674 InstanceType higher_limit);
1675
1676 // Variant of the above, which only guarantees to set the correct eq/ne flag.
1677 // Neither map, nor type_reg might be set to any particular value.
1678 void IsObjectType(Register heap_object, Register scratch1, Register scratch2,
1679 InstanceType type);
1680
1681#if V8_STATIC_ROOTS_BOOL
1682 // Fast variant which is guaranteed to not actually load the instance type
1683 // from the map.
1684 void IsObjectTypeFast(Register heap_object, Register compressed_map_scratch,
1685 InstanceType type, Register scratch);
1686 void CompareInstanceTypeWithUniqueCompressedMap(Register map,
1687 Register scratch,
1688 InstanceType type);
1689#endif // V8_STATIC_ROOTS_BOOL
1690
1691 // Compare object type for heap object, and branch if equal (or not.)
1692 // heap_object contains a non-Smi whose object type should be compared with
1693 // the given type. This both sets the flags and leaves the object type in
1694 // the type_reg register. It leaves the map in the map register (unless the
1695 // type_reg and map register are the same register). It leaves the heap
1696 // object in the heap_object register unless the heap_object register is the
1697 // same register as one of the other registers.
1698 void JumpIfObjectType(Register object, Register map, Register type_reg,
1699 InstanceType type, Label* if_cond_pass,
1700 Condition cond = eq);
1701
1702 // Compare instance type in a map. map contains a valid map object whose
1703 // object type should be compared with the given type. This both
1704 // sets the flags and leaves the object type in the type_reg register.
1706
1707 // Compare instance type ranges for a map (lower_limit and higher_limit
1708 // inclusive).
1709 //
1710 // Always use unsigned comparisons: ls for a positive result.
1712 Register scratch, InstanceType lower_limit,
1713 InstanceType higher_limit);
1714
1715 // Compare the object in a register to a value from the root list.
1716 // Uses the ip register as scratch.
1718 void CompareTaggedRoot(const Register& with, RootIndex index);
1719
1720 void PushRoot(RootIndex index) {
1721 LoadRoot(r0, index);
1722 Push(r0);
1723 }
1724
1725 // Compare the object in a register to a value and jump if they are equal.
1726 void JumpIfRoot(Register with, RootIndex index, Label* if_equal) {
1727 CompareRoot(with, index);
1728 beq(if_equal);
1729 }
1730
1731 // Compare the object in a register to a value and jump if they are not equal.
1732 void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal) {
1733 CompareRoot(with, index);
1734 bne(if_not_equal);
1735 }
1736
1737 // Checks if value is in range [lower_limit, higher_limit] using a single
1738 // comparison.
1739 void CompareRange(Register value, Register scratch, unsigned lower_limit,
1740 unsigned higher_limit);
1741 void JumpIfIsInRange(Register value, Register scratch, unsigned lower_limit,
1742 unsigned higher_limit, Label* on_in_range);
1743
1745 Register heap_object, Register scratch, Label* target,
1746 Label::Distance distance = Label::kFar,
1747 Condition condition = Condition::kUnsignedGreaterThanEqual);
1748 void JumpIfJSAnyIsPrimitive(Register heap_object, Register scratch,
1749 Label* target,
1750 Label::Distance distance = Label::kFar) {
1751 return JumpIfJSAnyIsNotPrimitive(heap_object, scratch, target, distance,
1752 Condition::kUnsignedLessThan);
1753 }
1754
1755 // Tiering support.
1761 Register closure, Register scratch1,
1762 Register slot_address);
1765 Register flags, Register feedback_vector, CodeKind current_code_kind,
1766 Label* flags_need_processing);
1768 Register feedback_vector);
1769
1770 // ---------------------------------------------------------------------------
1771 // Runtime calls
1772
1773 static int CallSizeNotPredictableCodeSize(Address target,
1774 RelocInfo::Mode rmode,
1775 Condition cond = al);
1776 void CallJSEntry(Register target);
1777
1778 // Call a runtime routine.
1779 void CallRuntime(const Runtime::Function* f, int num_arguments);
1780
1781 // Convenience function: Same as above, but takes the fid instead.
1783 const Runtime::Function* function = Runtime::FunctionForId(fid);
1784 CallRuntime(function, function->nargs);
1785 }
1786
1787 // Convenience function: Same as above, but takes the fid instead.
1788 void CallRuntime(Runtime::FunctionId fid, int num_arguments) {
1789 CallRuntime(Runtime::FunctionForId(fid), num_arguments);
1790 }
1791
1792 // Convenience function: tail call a runtime routine (jump).
1794
1795 // Jump to a runtime routine.
1797 bool builtin_exit_frame = false);
1798
1799 // ---------------------------------------------------------------------------
1800 // In-place weak references.
1801 void LoadWeakValue(Register out, Register in, Label* target_if_cleared);
1802
1803 // ---------------------------------------------------------------------------
1804 // StatsCounter support
1805
1806 void IncrementCounter(StatsCounter* counter, int value, Register scratch1,
1807 Register scratch2) {
1808 if (!v8_flags.native_code_counters) return;
1809 EmitIncrementCounter(counter, value, scratch1, scratch2);
1810 }
1811 void EmitIncrementCounter(StatsCounter* counter, int value, Register scratch1,
1812 Register scratch2);
1813 void DecrementCounter(StatsCounter* counter, int value, Register scratch1,
1814 Register scratch2) {
1815 if (!v8_flags.native_code_counters) return;
1816 EmitDecrementCounter(counter, value, scratch1, scratch2);
1817 }
1818 void EmitDecrementCounter(StatsCounter* counter, int value, Register scratch1,
1819 Register scratch2);
1820
1821 // ---------------------------------------------------------------------------
1822 // Stack limit utilities
1823
1824 void StackOverflowCheck(Register num_args, Register scratch,
1825 Label* stack_overflow);
1827 Register scratch);
1828
1829 // ---------------------------------------------------------------------------
1830 // Smi utilities
1831
1832 // Jump if either of the registers contain a non-smi.
1833 inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
1834 TestIfSmi(value, r0);
1835 bne(not_smi_label, cr0);
1836 }
1837
1838#if !defined(V8_COMPRESS_POINTERS) && !defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1839 // Ensure it is permissible to read/write int value directly from
1840 // upper half of the smi.
1841 static_assert(kSmiTag == 0);
1842 static_assert(kSmiTagSize + kSmiShiftSize == 32);
1843#endif
1844#if V8_TARGET_ARCH_PPC64 && V8_TARGET_LITTLE_ENDIAN
1845#define SmiWordOffset(offset) (offset + kSystemPointerSize / 2)
1846#else
1847#define SmiWordOffset(offset) offset
1848#endif
1849
1850 // Abort execution if argument is not a Constructor, enabled via --debug-code.
1852
1853 // Abort execution if argument is not a JSFunction, enabled via --debug-code.
1855
1856 // Abort execution if argument is not a callable JSFunction, enabled via
1857 // --debug-code.
1859
1860 // Abort execution if argument is not a JSBoundFunction,
1861 // enabled via --debug-code.
1863
1864 // Abort execution if argument is not a JSGeneratorObject (or subclass),
1865 // enabled via --debug-code.
1867
1868 // Abort execution if argument is not undefined or an AllocationSite, enabled
1869 // via --debug-code.
1872
1873 void AssertJSAny(Register object, Register map_tmp, Register tmp,
1874 AbortReason abort_reason) NOOP_UNLESS_DEBUG_CODE;
1875 // ---------------------------------------------------------------------------
1876 // Patching helpers.
1877
1878 template <typename Field>
1879 void DecodeField(Register dst, Register src, RCBit rc = LeaveRC) {
1880 ExtractBitRange(dst, src, Field::kShift + Field::kSize - 1, Field::kShift,
1881 rc);
1882 }
1883
1884 template <typename Field>
1885 void DecodeField(Register reg, RCBit rc = LeaveRC) {
1886 DecodeField<Field>(reg, reg, rc);
1887 }
1888
1890 Register scratch2);
1892
1893 private:
1895
1896 int CalculateStackPassedWords(int num_reg_arguments,
1897 int num_double_arguments);
1898
1899 // Helper functions for generating invokes.
1900 void InvokePrologue(Register expected_parameter_count,
1901 Register actual_parameter_count, InvokeType type);
1902
1904};
1905
1906struct MoveCycleState {
1907 // Whether a move in the cycle needs a double scratch register.
1909};
1910
1911// Provides access to exit frame parameters (GC-ed).
1913 // The slot at [sp] is reserved in all ExitFrames for storing the return
1914 // address before doing the actual call, it's necessary for frame iteration
1915 // (see StoreReturnAddressAndCall for details).
1916 static constexpr int kSPOffset = 1 * kSystemPointerSize;
1918 offset + kSPOffset);
1919}
1920
1921// Provides access to exit frame stack space (not GC-ed).
1923 return MemOperand(
1926}
1927
1928// Calls an API function. Allocates HandleScope, extracts returned value
1929// from handle and propagates exceptions. Clobbers C argument registers
1930// and C caller-saved registers. Restores context. On return removes
1931// (*argc_operand + slots_to_drop_on_return) * kSystemPointerSize
1932// (GCed, includes the call JS arguments space and the additional space
1933// allocated for the fast call).
1934void CallApiFunctionAndReturn(MacroAssembler* masm, bool with_profiling,
1935 Register function_address,
1936 ExternalReference thunk_ref, Register thunk_arg,
1937 int slots_to_drop_on_return,
1938 MemOperand* argc_operand,
1939 MemOperand return_value_operand);
1940
1941#define ACCESS_MASM(masm) masm->
1942
1943} // namespace internal
1944} // namespace v8
1945
1946#endif // V8_CODEGEN_PPC_MACRO_ASSEMBLER_PPC_H_
Builtins::Kind kind
Definition builtins.cc:40
static constexpr int kFixedSlotCountAboveFp
void JumpIfJSAnyIsPrimitive(Register heap_object, Register scratch, Label *target, Label::Distance distance=Label::kFar)
void F32x4ExtractLane(DoubleRegister dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch1, Register scratch2, Register scratch3)
void LoadSimd128LE(Simd128Register dst, const MemOperand &mem, Register scratch)
void DivF32(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void JumpIfRoot(Register with, RootIndex index, Label *if_equal)
void Abort(AbortReason reason)
void Push(Register src1, Register src2, Register src3, Register src4, Register src5)
void MovToFloatParameters(DoubleRegister src1, DoubleRegister src2)
void SwapSimd128(MemOperand src, MemOperand dst, Simd128Register scratch1, Simd128Register scratch2, Register scratch3)
void MaxF64(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, DoubleRegister scratch=kScratchDoubleReg)
void ClearByteU64(Register dst, int byte_idx)
void SmiUntag(Register dst, const MemOperand &src, RCBit rc=LeaveRC, Register scratch=no_reg)
void ConvertIntToFloat(Register src, DoubleRegister dst)
void ZeroExtend(Register dst, Register value)
void InsertDoubleHigh(DoubleRegister dst, Register src, Register scratch)
void StoreSandboxedPointerField(Register value, const MemOperand &dst_field_operand, Register scratch=no_reg)
void LoadSimd128Uint64(Simd128Register reg, const MemOperand &mem, Register scratch)
void MultiPushF64AndV128(DoubleRegList dregs, Simd128RegList simd_regs, Register scratch1, Register scratch2, Register location=sp)
void ConvertIntToDouble(Register src, DoubleRegister dst)
void Pop(Register src1, Register src2, Register src3)
void LoadTaggedField(const Register &destination, const MemOperand &field_operand, const Register &scratch=no_reg)
void LoadS16LE(Register dst, const MemOperand &mem, Register scratch)
void S128Select(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register mask)
void I64x2UConvertI32x4High(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void CountTrailingZerosU64(Register dst, Register src, Register scratch1=ip, Register scratch2=r0, RCBit r=LeaveRC)
void LoadAndSplat64x2LE(Simd128Register dst, const MemOperand &mem, Register scratch)
void LoadU64LE(Register dst, const MemOperand &mem, Register scratch)
void JumpIfIsInRange(Register value, Register scratch, unsigned lower_limit, unsigned higher_limit, Label *on_in_range)
void ByteReverseU64(Register dst, Register val, Register=r0)
void I16x8UConvertI8x16Low(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
int LeaveFrame(StackFrame::Type type, int stack_adjustment=0)
void LoadU8(Register dst, const MemOperand &mem, Register scratch=no_reg)
void CompareInstanceType(Register map, Register type_reg, InstanceType type)
void CallRuntime(Runtime::FunctionId fid)
void LoadAndSplat16x8LE(Simd128Register dst, const MemOperand &me, Register scratch)
int CalculateStackPassedWords(int num_reg_arguments, int num_double_arguments)
void AndU32(Register dst, Register src, const Operand &value, Register scratch=r0, RCBit r=SetRC)
int CallCFunction(ExternalReference function, int num_reg_arguments, int num_double_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, bool has_function_descriptor=true)
void I32x4UConvertI16x8Low(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void StoreTaggedField(const Register &value, const MemOperand &dst_field_operand, const Register &scratch=no_reg)
void Call(Address target, RelocInfo::Mode rmode, Condition cond=al)
void XorU32(Register dst, Register src, const Operand &value, Register scratch=r0, RCBit r=SetRC)
void CompareTagged(Register src1, Register src2, CRegister cr=cr0)
void SignedExtend(Register dst, Register value)
void ModS32(Register dst, Register src, Register value)
void Move(Register dst, Register src, Condition cond=al)
void IsObjectType(Register heap_object, Register scratch1, Register scratch2, InstanceType type)
void ModS64(Register dst, Register src, Register value)
void RecordWrite(Register object, Register slot_address, Register value, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, SmiCheck smi_check=SmiCheck::kInline, SlotDescriptor slot=SlotDescriptor::ForDirectPointerSlot())
void AssertFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void TestIfSmi(Register value, Register scratch)
void CallIndirectPointerBarrier(Register object, Register slot_address, SaveFPRegsMode fp_mode, IndirectPointerTag tag)
void MovInt64ComponentsToDouble(DoubleRegister dst, Register src_hi, Register src_lo, Register scratch)
void MulS32(Register dst, Register src, const Operand &value, Register scratch=r0, OEBit s=LeaveOE, RCBit r=LeaveRC)
void SubF64(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void AssertGeneratorObject(Register object) NOOP_UNLESS_DEBUG_CODE
void SubSmiLiteral(Register dst, Register src, Tagged< Smi > smi, Register scratch)
void ShiftLeftU32(Register dst, Register src, Register value, RCBit r=LeaveRC)
static int CallSizeNotPredictableCodeSize(Address target, RelocInfo::Mode rmode, Condition cond=al)
void DecodeField(Register reg, RCBit rc=LeaveRC)
void JumpIfNotRoot(Register with, RootIndex index, Label *if_not_equal)
void Pop(Register src1, Register src2, Register src3, Register src4)
void MultiPopF64AndV128(DoubleRegList dregs, Simd128RegList simd_regs, Register scratch1, Register scratch2, Register location=sp)
void LoadLane32LE(Simd128Register dst, const MemOperand &mem, int lane, Register scratch1, Simd128Register scratch2)
void LoadEntryFromBuiltin(Builtin builtin, Register destination)
void CopySignF64(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void PushStandardFrame(Register function_reg)
void AndU64(Register dst, Register src, Register value, RCBit r=SetRC)
void StoreF64(DoubleRegister src, const MemOperand &mem, Register scratch=no_reg)
void I32x4ReplaceLane(Simd128Register dst, Simd128Register src1, Register src2, uint8_t imm_lane_idx, Simd128Register scratch)
void I8x16ExtractLaneS(Register dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch)
void CompareRoot(Register obj, RootIndex index)
void StoreF64WithUpdate(DoubleRegister src, const MemOperand &mem, Register scratch=no_reg)
void I64x2ExtractLane(Register dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch)
void CmpU64(Register src1, Register src2, CRegister cr=cr0)
void StoreIndirectPointerField(Register value, MemOperand dst_field_operand, Register scratch)
void LoadReserve(Register output, MemOperand dst)
void LoadV64ZeroLE(Simd128Register dst, const MemOperand &mem, Register scratch1, Simd128Register scratch2)
void AddF64(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void MovDoubleLowToInt(Register dst, DoubleRegister src)
void StoreU64WithUpdate(Register src, const MemOperand &mem, Register scratch=no_reg)
void StoreU64LE(Register src, const MemOperand &mem, Register scratch)
void LoadF32(DoubleRegister dst, const MemOperand &mem, Register scratch=no_reg)
void ConvertUnsignedIntToFloat(Register src, DoubleRegister dst)
void CompareObjectType(Register heap_object, Register map, Register type_reg, InstanceType type)
void ModU64(Register dst, Register src, Register value)
void LoadV32ZeroLE(Simd128Register dst, const MemOperand &mem, Register scratch1, Simd128Register scratch2)
void Move(Register dst, Tagged< Smi > smi)
void LoadAndExtend32x2SLE(Simd128Register dst, const MemOperand &mem, Register scratch)
void CountTrailingZerosU32(Register dst, Register src, Register scratch1=ip, Register scratch2=r0, RCBit r=LeaveRC)
void MulF64(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void StoreConditional(Register value, MemOperand dst)
void AddF32(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void Move(Register dst, const MemOperand &src)
void StoreF32LE(DoubleRegister src, const MemOperand &mem, Register scratch, Register scratch2)
void Check(Condition cond, AbortReason reason, CRegister cr=cr0)
void LoadExternalPointerField(Register destination, MemOperand field_operand, ExternalPointerTag tag, Register isolate_root=no_reg, Register scratch=no_reg)
void Call(Handle< Code > code, RelocInfo::Mode rmode=RelocInfo::CODE_TARGET, Condition cond=al)
void StoreReturnAddressAndCall(Register target)
void StoreU16LE(Register src, const MemOperand &mem, Register scratch)
void LeaveExitFrame(Register scratch)
void StackOverflowCheck(Register num_args, Register scratch, Label *stack_overflow)
void AssertFeedbackVector(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void CallBuiltinByIndex(Register builtin_index, Register target)
void ZeroExtWord32(Register dst, Register src)
void I16x8Splat(Simd128Register dst, Register src)
void LoadRootRelative(Register destination, int32_t offset) final
void JumpIfSmi(Register value, Label *smi_label)
void LoadRoot(Register destination, RootIndex index, Condition cond)
void StoreSimd128Uint16(Simd128Register reg, const MemOperand &mem, Register scratch)
void LoadPC(Register dst)
static int ActivationFrameAlignment()
void JumpIfObjectType(Register object, Register map, Register type_reg, InstanceType type, Label *if_cond_pass, Condition cond=eq)
void ShiftRightU64(Register dst, Register src, const Operand &value, RCBit r=LeaveRC)
void LoadU16(Register dst, const MemOperand &mem, Register scratch=no_reg)
void Push(Handle< HeapObject > handle)
void PrepareCallCFunction(int num_reg_arguments, Register scratch)
void MultiPopV128(Simd128RegList dregs, Register scratch, Register location=sp)
void CallCodeObject(Register code_object)
void CallEphemeronKeyBarrier(Register object, Register slot_address, SaveFPRegsMode fp_mode)
void SmiTag(Register dst, Register src, RCBit rc=LeaveRC)
void SmiUntag(Register dst, Register src, RCBit rc=LeaveRC)
void SubF32(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void StoreCodePointerField(Register value, MemOperand dst_field_operand, Register scratch=no_reg)
void AtomicExchange(MemOperand dst, Register new_value, Register output)
void DivS32(Register dst, Register src, Register value, OEBit s=LeaveOE, RCBit r=LeaveRC)
void SwapDouble(DoubleRegister src, DoubleRegister dst, DoubleRegister scratch)
void LoadRootRegisterOffset(Register destination, intptr_t offset) final
void CanonicalizeNaN(const DoubleRegister value)
void SubAndCheckForOverflow(Register dst, Register left, Register right, Register overflow_dst, Register scratch=r0)
void LoadS32(Register dst, const MemOperand &mem, Register scratch=no_reg)
void AtomicOps(MemOperand dst, Register value, Register output, Register result, bin_op op)
void AssertSmi(Register object) NOOP_UNLESS_DEBUG_CODE
void LoadCodeInstructionStart(Register destination, Register code_object, CodeEntrypointTag tag=kDefaultCodeEntrypointTag)
void CountLeadingZerosU32(Register dst, Register src, RCBit r=LeaveRC)
void Jump(Address target, RelocInfo::Mode rmode, Condition cond=al, CRegister cr=cr0)
void LoadAndExtend8x8SLE(Simd128Register dst, const MemOperand &mem, Register scratch)
void ShiftLeftU64(Register dst, Register src, Register value, RCBit r=LeaveRC)
void I64x2Mul(Simd128Register dst, Simd128Register src1, Simd128Register src2, Register scratch1, Register scrahc2, Register scratch3, Simd128Register scratch4)
void LoadDoubleLiteral(DoubleRegister result, base::Double value, Register scratch)
void ExtractBitRange(Register dst, Register src, int rangeStart, int rangeEnd, RCBit rc=LeaveRC, bool test=false)
void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(Register flags, Register feedback_vector, CodeKind current_code_kind, Label *flags_need_processing)
void AddS32(Register dst, Register src, Register value, RCBit r=LeaveRC)
void LoadFeedbackVector(Register dst, Register closure, Register scratch, Label *fbv_undef)
void MulS64(Register dst, Register src, Register value, OEBit s=LeaveOE, RCBit r=LeaveRC)
void F64x2Max(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register scratch1, Simd128Register scratch2)
void StoreLane8LE(Simd128Register src, const MemOperand &mem, int lane, Register scratch1, Simd128Register scratch2)
void EmitIncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void LoadSimd128Uint16(Simd128Register reg, const MemOperand &mem, Register scratch)
void Jump(Register target)
void SwapFloat32(DoubleRegister src, DoubleRegister dst, DoubleRegister scratch)
void InvokeFunctionCode(Register function, Register new_target, Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void LoadLane64LE(Simd128Register dst, const MemOperand &mem, int lane, Register scratch1, Simd128Register scratch2)
void LoadS32LE(Register dst, const MemOperand &mem, Register scratch)
void DecodeSandboxedPointer(Register value)
void XorU64(Register dst, Register src, Register value, RCBit r=LeaveRC)
void MovDoubleHighToInt(Register dst, DoubleRegister src)
int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg) const
void CmpU32(Register src1, const Operand &src2, Register scratch, CRegister cr=cr0)
void LoadU16LE(Register dst, const MemOperand &mem, Register scratch)
void MinF64(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, DoubleRegister scratch=kScratchDoubleReg)
void MultiPopDoubles(DoubleRegList dregs, Register location=sp)
void Jump(const ExternalReference &reference)
void ModU32(Register dst, Register src, Register value)
void StoreSimd128Uint64(Simd128Register reg, const MemOperand &mem, Register scratch)
void JumpIfJSAnyIsNotPrimitive(Register heap_object, Register scratch, Label *target, Label::Distance distance=Label::kFar, Condition condition=Condition::kUnsignedGreaterThanEqual)
MemOperand ExternalReferenceAsOperand(IsolateFieldId id)
void OrU64(Register dst, Register src, const Operand &value, Register scratch=r0, RCBit r=SetRC)
void LoadSimd128Uint32(Simd128Register reg, const MemOperand &mem, Register scratch)
void AtomicCompareExchange(MemOperand dst, Register old_value, Register new_value, Register output, Register scratch)
void InvokePrologue(Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void Push(Register src1, Register src2, Register src3)
void StoreSimd128(Simd128Register src, const MemOperand &mem, Register scratch)
void StoreLane32LE(Simd128Register src, const MemOperand &mem, int lane, Register scratch1, Simd128Register scratch2)
void I16x8ReplaceLane(Simd128Register dst, Simd128Register src1, Register src2, uint8_t imm_lane_idx, Simd128Register scratch)
void MovDoubleToInt64(Register dst, DoubleRegister src)
void ReplaceClosureCodeWithOptimizedCode(Register optimized_code, Register closure, Register scratch1, Register slot_address)
void CmpS64(Register src1, Register src2, CRegister cr=cr0)
void F32x4ReplaceLane(Simd128Register dst, Simd128Register src1, DoubleRegister src2, uint8_t imm_lane_idx, Register scratch1, DoubleRegister scratch2, Simd128Register scratch3)
void I8x16Shuffle(Simd128Register dst, Simd128Register src1, Simd128Register src2, uint64_t high, uint64_t low, Register scratch1, Register scratch2, Simd128Register scratch3)
void ZeroExtHalfWord(Register dst, Register src)
void EnterExitFrame(Register scratch, int stack_space, StackFrame::Type frame_type)
void TestIfInt32(Register value, Register scratch, CRegister cr=cr0)
void Zero(const MemOperand &dest1, const MemOperand &dest2)
void AddS64(Register dst, Register src, const Operand &value, Register scratch=r0, OEBit s=LeaveOE, RCBit r=LeaveRC)
void ConvertUnsignedInt64ToFloat(Register src, DoubleRegister double_dst)
void SwapP(MemOperand src, MemOperand dst, Register scratch_0, Register scratch_1)
void SetRoundingMode(FPRoundingMode RN)
void LoadF64WithUpdate(DoubleRegister dst, const MemOperand &mem, Register scratch=no_reg)
void I16x8ExtractLaneS(Register dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch)
void CallJSEntry(Register target)
void Assert(Condition cond, AbortReason reason, CRegister cr=cr0) NOOP_UNLESS_DEBUG_CODE
void SubS64(Register dst, Register src, const Operand &value, Register scratch=r0, OEBit s=LeaveOE, RCBit r=LeaveRC)
void ShiftRightU32(Register dst, Register src, const Operand &value, RCBit r=LeaveRC)
void SwapFloat32(MemOperand src, MemOperand dst, DoubleRegister scratch_0, DoubleRegister scratch_1)
void StoreF64LE(DoubleRegister src, const MemOperand &mem, Register scratch, Register scratch2)
MemOperand ExternalReferenceAsOperand(ExternalReference reference, Register scratch)
void Zero(const MemOperand &dest)
void IncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void ShiftRightS32(Register dst, Register src, const Operand &value, RCBit r=LeaveRC)
void AndU64(Register dst, Register src, const Operand &value, Register scratch=r0, RCBit r=SetRC)
void MovFloatToInt(Register dst, DoubleRegister src, DoubleRegister scratch)
void SwapP(Register src, MemOperand dst, Register scratch)
void I16x8ExtractLaneU(Register dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch)
void PushCommonFrame(Register marker_reg=no_reg)
void MovFromFloatParameter(DoubleRegister dst)
void LoadTrustedPointerField(Register destination, MemOperand field_operand, IndirectPointerTag tag, Register scratch=no_reg)
void JumpIfEqual(Register x, int32_t y, Label *dest)
void AndSmiLiteral(Register dst, Register src, Tagged< Smi > smi, Register scratch, RCBit rc=LeaveRC)
void SwapDouble(DoubleRegister src, MemOperand dst, DoubleRegister scratch)
void InsertDoubleLow(DoubleRegister dst, Register src, Register scratch)
void GetLabelAddress(Register dst, Label *target)
void StoreF32(DoubleRegister src, const MemOperand &mem, Register scratch=no_reg)
int PopCallerSaved(SaveFPRegsMode fp_mode, Register scratch1, Register scratch2, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg)
void TestCodeIsMarkedForDeoptimization(Register code, Register scratch1, Register scratch2)
void DecompressTaggedSigned(Register destination, MemOperand field_operand)
void V128AnyTrue(Register dst, Simd128Register src, Register scratch1, Register scratch2, Simd128Register scratch3)
void JumpToExternalReference(const ExternalReference &builtin, bool builtin_exit_frame=false)
void ShiftLeftU32(Register dst, Register src, const Operand &value, RCBit r=LeaveRC)
Operand ClearedValue() const
void SwapSimd128(Simd128Register src, Simd128Register dst, Simd128Register scratch)
int CallCFunction(Register function, int num_reg_arguments, int num_double_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, bool has_function_descriptor=true)
void CmpU64(Register src1, const Operand &src2, Register scratch, CRegister cr=cr0)
void LoadF64(DoubleRegister dst, const MemOperand &mem, Register scratch=no_reg)
void I32x4Splat(Simd128Register dst, Register src)
void InvokeFunctionWithNewTarget(Register function, Register new_target, Register actual_parameter_count, InvokeType type)
void AddS32(Register dst, Register src, const Operand &value, Register scratch=r0, RCBit r=LeaveRC)
void MovToFloatParameter(DoubleRegister src)
void LoadSimd128(Simd128Register dst, const MemOperand &mem, Register scratch)
void StoreU32(Register src, const MemOperand &mem, Register scratch)
void BindExceptionHandler(Label *label)
void LoadAndSplat8x16LE(Simd128Register dst, const MemOperand &mem, Register scratch)
void LoadRoot(Register destination, RootIndex index) final
void SubS64(Register dst, Register src, Register value, OEBit s=LeaveOE, RCBit r=LeaveRC)
void OrU64(Register dst, Register src, Register value, RCBit r=LeaveRC)
void AndU32(Register dst, Register src, Register value, RCBit r=SetRC)
void TailCallBuiltin(Builtin builtin, Condition cond=al, CRegister cr=cr0)
void ShiftRightU32(Register dst, Register src, Register value, RCBit r=LeaveRC)
void MovToFloatResult(DoubleRegister src)
void I8x16ExtractLaneU(Register dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch)
void MovInt64ToDouble(DoubleRegister dst, Register src)
void Pop(Register src1, Register src2)
void LoadF32LE(DoubleRegister dst, const MemOperand &mem, Register scratch, Register scratch2)
void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg=false)
void InvokeFunction(Register function, Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void TruncateDoubleToI(Isolate *isolate, Zone *zone, Register result, DoubleRegister double_input, StubCallMode stub_mode)
void LoadU32(Register dst, const MemOperand &mem, Register scratch=no_reg)
void Popcnt32(Register dst, Register src)
void OrU32(Register dst, Register src, Register value, RCBit r=LeaveRC)
void DivF64(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void SwapP(Register src, Register dst, Register scratch)
void CompareRange(Register value, Register scratch, unsigned lower_limit, unsigned higher_limit)
void S128Const(Simd128Register dst, uint64_t high, uint64_t low, Register scratch1, Register scratch2)
void JumpCodeObject(Register code_object, JumpMode jump_mode=JumpMode::kJump)
void PushArray(Register array, Register size, Register scratch, Register scratch2, PushArrayOrder order=kNormal)
void DecrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void I64x2Splat(Simd128Register dst, Register src)
void ConvertUnsignedIntToDouble(Register src, DoubleRegister dst)
void LoadU64WithUpdate(Register dst, const MemOperand &mem, Register scratch=no_reg)
void SwapFloat32(DoubleRegister src, MemOperand dst, DoubleRegister scratch)
void ConvertUnsignedInt64ToDouble(Register src, DoubleRegister double_dst)
void EmitDecrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void I32x4UConvertI16x8High(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void ShiftRightU64(Register dst, Register src, Register value, RCBit r=LeaveRC)
void LoadFromConstantsTable(Register destination, int constant_index) final
void ShiftRightS64(Register dst, Register src, const Operand &value, RCBit r=LeaveRC)
MemOperand EntryFromBuiltinAsOperand(Builtin builtin)
void LoadS8(Register dst, const MemOperand &mem, Register scratch=no_reg)
void CompareObjectTypeRange(Register heap_object, Register map, Register type_reg, Register scratch, InstanceType lower_limit, InstanceType higher_limit)
void AssertNotSmi(Register object) NOOP_UNLESS_DEBUG_CODE
void F64x2Min(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register scratch1, Simd128Register scratch2)
void ComputeCodeStartAddress(Register dst)
void MaybeSaveRegisters(RegList registers)
void LoadTaggedRoot(Register destination, RootIndex index)
DISALLOW_IMPLICIT_CONSTRUCTORS(MacroAssembler)
void CmpU32(Register src1, Register src2, CRegister cr=cr0)
void I32x4ExtractLane(Register dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch)
void SubS32(Register dst, Register src, const Operand &value, Register scratch=r0, RCBit r=LeaveRC)
void JumpIfNotSmi(Register value, Label *not_smi_label)
void MultiPush(RegList regs, Register location=sp)
void SwapDouble(MemOperand src, MemOperand dst, DoubleRegister scratch_0, DoubleRegister scratch_1)
void RecordWriteField(Register object, int offset, Register value, Register slot_address, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, SmiCheck smi_check=SmiCheck::kInline, SlotDescriptor slot=SlotDescriptor::ForDirectPointerSlot())
void MulS32(Register dst, Register src, Register value, OEBit s=LeaveOE, RCBit r=LeaveRC)
void LoadStackLimit(Register destination, StackLimitKind kind, Register scratch)
void I64x2ReplaceLane(Simd128Register dst, Simd128Register src1, Register src2, uint8_t imm_lane_idx, Simd128Register scratch)
void CheckDebugHook(Register fun, Register new_target, Register expected_parameter_count, Register actual_parameter_count)
void LoadLane8LE(Simd128Register dst, const MemOperand &mem, int lane, Register scratch1, Simd128Register scratch2)
void LoadAndExtend8x8ULE(Simd128Register dst, const MemOperand &mem, Register scratch1, Simd128Register scratch2)
void TestBitRange(Register value, int rangeStart, int rangeEnd, Register scratch=r0)
void AssertConstructor(Register object) NOOP_UNLESS_DEBUG_CODE
void ConvertDoubleToUnsignedInt64(const DoubleRegister double_input, const Register dst, const DoubleRegister double_dst, FPRoundingMode rounding_mode=kRoundToZero)
void CallRuntime(const Runtime::Function *f, int num_arguments)
void LoadWeakValue(Register out, Register in, Label *target_if_cleared)
void CallBuiltin(Builtin builtin, Condition cond=al)
void ExtractBit(Register dst, Register src, uint32_t bitNumber, RCBit rc=LeaveRC, bool test=false)
void CmplSmiLiteral(Register src1, Tagged< Smi > smi, Register scratch, CRegister cr=cr0)
void DivU32(Register dst, Register src, Register value, OEBit s=LeaveOE, RCBit r=LeaveRC)
void Popcnt64(Register dst, Register src)
int CallCFunction(Register function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, bool has_function_descriptor=true)
void ShiftRightS64(Register dst, Register src, Register value, RCBit r=LeaveRC)
void Pop(Register src1, Register src2, Register src3, Register src4, Register src5)
void AddSmiLiteral(Register dst, Register src, Tagged< Smi > smi, Register scratch)
void CanonicalizeNaN(const DoubleRegister dst, const DoubleRegister src)
void OrU32(Register dst, Register src, const Operand &value, Register scratch=r0, RCBit r=SetRC)
void I8x16BitMask(Register dst, Simd128Register src, Register scratch1, Register scratch2, Simd128Register scratch3)
void MulS64(Register dst, Register src, const Operand &value, Register scratch=r0, OEBit s=LeaveOE, RCBit r=LeaveRC)
void GenerateTailCallToReturnedCode(Runtime::FunctionId function_id)
void LoadAndSplat32x4LE(Simd128Register dst, const MemOperand &mem, Register scratch)
void XorU32(Register dst, Register src, Register value, RCBit r=LeaveRC)
void ShiftLeftU64(Register dst, Register src, const Operand &value, RCBit r=LeaveRC)
void LoadIntLiteral(Register dst, int value)
void CmpS64(Register src1, const Operand &src2, Register scratch, CRegister cr=cr0)
void SmiUntag(Register reg, RCBit rc=LeaveRC)
void ReverseBitsU64(Register dst, Register src, Register scratch1, Register scratch2)
void AssertJSAny(Register object, Register map_tmp, Register tmp, AbortReason abort_reason) NOOP_UNLESS_DEBUG_CODE
void Push(Tagged< Smi > smi)
void LoadSmiLiteral(Register dst, Tagged< Smi > smi)
void SubS32(Register dst, Register src, Register value, RCBit r=LeaveRC)
void ExtractBitMask(Register dst, Register src, uintptr_t mask, RCBit rc=LeaveRC, bool test=false)
void DivU64(Register dst, Register src, Register value, OEBit s=LeaveOE, RCBit r=LeaveRC)
int CallCFunction(ExternalReference function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, bool has_function_descriptor=true)
void AssertFeedbackCell(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void LoadU32LE(Register dst, const MemOperand &mem, Register scratch)
void MovIntToFloat(DoubleRegister dst, Register src, Register scratch)
void CallForDeoptimization(Builtin target, int deopt_id, Label *exit, DeoptimizeKind kind, Label *ret, Label *jump_deoptimization_entry_label)
void Call(Label *target)
void I32x4DotI8x16AddS(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register src3)
void DropArgumentsAndPushNewReceiver(Register argc, Register receiver)
void F64x2ExtractLane(DoubleRegister dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch1, Register scratch2)
void MovUnsignedIntToDouble(DoubleRegister dst, Register src, Register scratch)
void AllocateStackSpace(Register bytes)
void LoadEntryFromBuiltinIndex(Register builtin_index, Register target)
void LoadIndirectPointerField(Register destination, MemOperand field_operand, IndirectPointerTag tag, Register scratch)
void LoadAndExtend16x4SLE(Simd128Register dst, const MemOperand &mem, Register scratch)
void ReverseBitsU32(Register dst, Register src, Register scratch1, Register scratch2)
void ShiftRightS32(Register dst, Register src, Register value, RCBit r=LeaveRC)
void I16x8UConvertI8x16High(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void LoadAndExtend32x2ULE(Simd128Register dst, const MemOperand &mem, Register scratch1, Simd128Register scratch2)
void CmpS32(Register src1, const Operand &src2, Register scratch, CRegister cr=cr0)
void StoreU64(Register src, const MemOperand &mem, Register scratch=no_reg)
void XorU64(Register dst, Register src, const Operand &value, Register scratch=r0, RCBit r=SetRC)
void Ret(Condition cond, CRegister cr=cr0)
void MultiPop(RegList regs, Register location=sp)
void Move(Register dst, Handle< HeapObject > value, RelocInfo::Mode rmode=RelocInfo::FULL_EMBEDDED_OBJECT)
void F64x2ConvertLowI32x4U(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void StoreU16(Register src, const MemOperand &mem, Register scratch)
void Move(DoubleRegister dst, DoubleRegister src)
void SwapSimd128(Simd128Register src, MemOperand dst, Simd128Register scratch1, Register scratch2)
void StoreU8(Register src, const MemOperand &mem, Register scratch)
void MultiPushV128(Simd128RegList dregs, Register scratch, Register location=sp)
void JumpIfLessThan(Register x, int32_t y, Label *dest)
void Push(Register src1, Register src2, Register src3, Register src4)
void ReverseBitsInSingleByteU64(Register dst, Register src, Register scratch1, Register scratch2, int byte_idx)
void AssertBoundFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void Jump(Handle< Code > code, RelocInfo::Mode rmode, Condition cond=al, CRegister cr=cr0)
void TestBitMask(Register value, uintptr_t mask, Register scratch=r0)
void JumpJSFunction(Register function_object, Register scratch, JumpMode jump_mode=JumpMode::kJump)
void DivS64(Register dst, Register src, Register value, OEBit s=LeaveOE, RCBit r=LeaveRC)
void CallJSFunction(Register function_object, uint16_t argument_count, Register scratch)
void CompareInstanceTypeRange(Register map, Register type_reg, Register scratch, InstanceType lower_limit, InstanceType higher_limit)
void ExtendValue(Register dst, Register value)
void DecodeField(Register dst, Register src, RCBit rc=LeaveRC)
void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, Register feedback_vector)
void LoadIsolateField(Register dst, IsolateFieldId id)
void StoreSimd128Uint8(Simd128Register reg, const MemOperand &mem, Register scratch)
void LoadU64(Register dst, const MemOperand &mem, Register scratch=no_reg)
void MaybeRestoreRegisters(RegList registers)
void CallRecordWriteStub(Register object, Register slot_address, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void I64x2UConvertI32x4Low(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void Call(Register target)
void MovIntToDouble(DoubleRegister dst, Register src, Register scratch)
void PrepareCallCFunction(int num_reg_arguments, int num_double_registers, Register scratch)
void CmpS32(Register src1, Register src2, CRegister cr=cr0)
void SmiTag(Register reg, RCBit rc=LeaveRC)
void I8x16Splat(Simd128Register dst, Register src)
void CmpSmiLiteral(Register src1, Tagged< Smi > smi, Register scratch, CRegister cr=cr0)
void Drop(Register count, Register scratch=r0)
void LoadF64LE(DoubleRegister dst, const MemOperand &mem, Register scratch, Register scratch2)
void F32x4Splat(Simd128Register dst, DoubleRegister src, DoubleRegister scratch1, Register scratch2)
void AssertUndefinedOrAllocationSite(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void MulF32(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, RCBit r=LeaveRC)
void Move(Register dst, ExternalReference reference)
void LoadF32WithUpdate(DoubleRegister dst, const MemOperand &mem, Register scratch=no_reg)
void ByteReverseU16(Register dst, Register val, Register scratch)
void CompareTaggedRoot(const Register &with, RootIndex index)
void LoadAndExtend16x4ULE(Simd128Register dst, const MemOperand &mem, Register scratch1, Simd128Register scratch2)
void LoadSimd128Uint8(Simd128Register reg, const MemOperand &mem, Register scratch)
void ZeroExtByte(Register dst, Register src)
void LoadCodePointerField(Register destination, MemOperand field_operand, Register scratch)
void StubPrologue(StackFrame::Type type)
void StoreTrustedPointerField(Register value, MemOperand dst_field_operand, Register scratch=no_reg)
void StoreU32LE(Register src, const MemOperand &mem, Register scratch)
void DecompressTagged(const Register &destination, Tagged_t immediate)
void DecompressTagged(Register destination, Register source)
void DecompressTagged(Register destination, MemOperand field_operand)
void DecompressTaggedSigned(Register destination, Register src)
void I8x16ReplaceLane(Simd128Register dst, Simd128Register src1, Register src2, uint8_t imm_lane_idx, Simd128Register scratch)
void StoreRootRelative(int32_t offset, Register value) final
void Push(Register src1, Register src2)
void F64x2ReplaceLane(Simd128Register dst, Simd128Register src1, DoubleRegister src2, uint8_t imm_lane_idx, Register scratch1, Simd128Register scratch2)
void CountLeadingZerosU64(Register dst, Register src, RCBit r=LeaveRC)
void LoadMap(Register destination, Register object)
void MovFromFloatResult(DoubleRegister dst)
void LoadSandboxedPointerField(Register destination, const MemOperand &field_operand, Register scratch=no_reg)
void TailCallRuntime(Runtime::FunctionId fid)
void LoadCompressedMap(Register dst, Register object, Register scratch)
void SmiToPtrArrayOffset(Register dst, Register src)
void CallRuntime(Runtime::FunctionId fid, int num_arguments)
void AddAndCheckForOverflow(Register dst, Register left, intptr_t right, Register overflow_dst, Register scratch=r0)
void TestBit(Register value, int bitNumber, Register scratch=r0)
void AddAndCheckForOverflow(Register dst, Register left, Register right, Register overflow_dst, Register scratch=r0)
void LoadNativeContextSlot(Register dst, int index)
void CallRecordWriteStubSaveRegisters(Register object, Register slot_address, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void StoreSimd128Uint32(Simd128Register reg, const MemOperand &mem, Register scratch)
void TryInlineTruncateDoubleToI(Register result, DoubleRegister input, Label *done)
void ConvertInt64ToDouble(Register src, DoubleRegister double_dst)
void StoreSimd128LE(Simd128Register src, const MemOperand &mem, Register scratch1, Simd128Register scratch2)
void LoadConstantPoolPointerRegisterFromCodeTargetAddress(Register code_target_address, Register scratch1, Register scratch2)
void ConvertInt64ToFloat(Register src, DoubleRegister double_dst)
void F64x2Splat(Simd128Register dst, DoubleRegister src, Register scratch)
int PushCallerSaved(SaveFPRegsMode fp_mode, Register scratch1, Register scratch2, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg)
void LoadS16(Register dst, const MemOperand &mem, Register scratch=no_reg)
void StoreLane16LE(Simd128Register src, const MemOperand &mem, int lane, Register scratch1, Simd128Register scratch2)
void AddS64(Register dst, Register src, Register value, OEBit s=LeaveOE, RCBit r=LeaveRC)
void ByteReverseU32(Register dst, Register val, Register scratch)
void ConvertDoubleToInt64(const DoubleRegister double_input, const Register dst, const DoubleRegister double_dst, FPRoundingMode rounding_mode=kRoundToZero)
void LoadTaggedSignedField(Register destination, MemOperand field_operand, Register scratch)
void CheckPageFlag(Register object, Register scratch, int mask, Condition cc, Label *condition_met)
void StoreLane64LE(Simd128Register src, const MemOperand &mem, int lane, Register scratch1, Simd128Register scratch2)
void LoadLane16LE(Simd128Register dst, const MemOperand &mem, int lane, Register scratch1, Simd128Register scratch2)
void DropArguments(Register count)
void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond=al, CRegister cr=cr0)
void StoreF32WithUpdate(DoubleRegister src, const MemOperand &mem, Register scratch=no_reg)
void AssertCallableFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void MultiPushDoubles(DoubleRegList dregs, Register location=sp)
#define NOOP_UNLESS_DEBUG_CODE
Definition assembler.h:628
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
int start
int end
DirectHandle< Object > new_target
Definition execution.cc:75
Label label
Isolate * isolate
int32_t offset
TNode< Object > receiver
RoundingMode rounding_mode
ZoneVector< RpoNumber > & result
LiftoffRegister reg
int y
int x
uint32_t const mask
#define SIMD_BITMASK_LIST(V)
#define PROTOTYPE_SIMD_ALL_TRUE(name)
#define PROTOTYPE_SIMD_UNOP_WITH_SCRATCH(name)
#define SIMD_UNOP_LIST(V)
#define SIMD_ALL_TRUE_LIST(V)
#define PROTOTYPE_SIMD_QFM(name)
#define PROTOTYPE_SIMD_BINOP(name)
#define PROTOTYPE_SIMD_SHIFT(name)
#define SIMD_BINOP_WITH_SCRATCH_LIST(V)
#define PROTOTYPE_SIMD_BINOP_WITH_SCRATCH(name)
#define SIMD_BINOP_LIST(V)
#define SIMD_UNOP_WITH_SCRATCH_LIST(V)
#define SIMD_QFM_LIST(V)
#define PROTOTYPE_SIMD_BITMASK(name)
#define SIMD_SHIFT_LIST(V)
#define SIMD_EXT_ADD_PAIRWISE_LIST(V)
#define PROTOTYPE_SIMD_EXT_ADD_PAIRWISE(name)
#define PROTOTYPE_SIMD_UNOP(name)
SmiCheck
InvokeType
SetIsolateDataSlots
JumpMode
RegListBase< RegisterT > registers
InstructionOperand destination
int r
Definition mul-fft.cc:298
constexpr Register no_reg
constexpr Register kRootRegister
MemOperand ExitFrameCallerStackSlotOperand(int index)
const int kSmiTagSize
Definition v8-internal.h:87
const int kStackFrameExtraParamSlot
Address Tagged_t
Definition globals.h:547
constexpr int kSystemPointerSizeLog2
Definition globals.h:494
constexpr int kSmiShift
MemOperand FieldMemOperand(Register object, int offset)
constexpr int kSystemPointerSize
Definition globals.h:410
constexpr bool SmiValuesAre31Bits()
const int kHeapObjectTag
Definition v8-internal.h:72
const int kSmiShiftSize
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr bool SmiValuesAre32Bits()
constexpr int kBitsPerSystemPointer
Definition globals.h:684
constexpr Register kPtrComprCageBaseRegister
void CallApiFunctionAndReturn(MacroAssembler *masm, bool with_profiling, Register function_address, ExternalReference thunk_ref, Register thunk_arg, int slots_to_drop_on_return, MemOperand *argc_operand, MemOperand return_value_operand)
const int kSmiTag
Definition v8-internal.h:86
Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2=no_reg, Register reg3=no_reg, Register reg4=no_reg, Register reg5=no_reg, Register reg6=no_reg)
MemOperand ExitFrameStackSlotOperand(int offset)
#define UNREACHABLE()
Definition logging.h:67
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define V8_EXPORT_PRIVATE
Definition macros.h:460