v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
macro-assembler-x64.h
Go to the documentation of this file.
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
6#define V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
7
8#ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
9#error This header must be included via macro-assembler.h
10#endif
11
12#include "src/base/flags.h"
16#include "src/common/globals.h"
21
22namespace v8 {
23namespace internal {
24
25// Convenience for platform-independent signatures.
26using MemOperand = Operand;
27
28struct SmiIndex {
30 : reg(index_register), scale(scale) {}
33};
34
35// TODO(victorgomes): Move definition to macro-assembler.h, once all other
36// platforms are updated.
38
39// Convenient class to access arguments below the stack pointer.
41 public:
42 // argc = the number of arguments not including the receiver.
43 explicit StackArgumentsAccessor(Register argc) : argc_(argc) {
45 }
46
47 // Argument 0 is the receiver (despite argc not including the receiver).
  // Returns the stack operand for argument {index}; index 0 addresses the
  // receiver (argc_ itself does not count the receiver).
  Operand operator[](int index) const { return GetArgumentOperand(index); }
49
50 Operand GetArgumentOperand(int index) const;
52
53 private:
54 const Register argc_;
55
57};
58
60 : public SharedMacroAssembler<MacroAssembler> {
61 public:
62 using SharedMacroAssembler<MacroAssembler>::SharedMacroAssembler;
63
  // Pushes {src} so that it sits where a return address is expected (top of
  // stack), via a plain 64-bit push.
  void PushReturnAddressFrom(Register src) { pushq(src); }
  // Pops the value on top of the stack (normally the return address) into
  // {dst}.
  void PopReturnAddressTo(Register dst) { popq(dst); }
66
67 void Ret();
68
69 // Call incsspq with {number_of_words} only if the cpu supports it.
70 // NOTE: This shouldn't be embedded in optimized code, since the check
71 // for CPU support would be redundant (we could check at compiler time).
72 void IncsspqIfSupported(Register number_of_words, Register scratch);
73
74 // Return and drop arguments from stack, where the number of arguments
75 // may be bigger than 2^16 - 1. Requires a scratch register.
76 void Ret(int bytes_dropped, Register scratch);
77
78 // Operations on roots in the root-array.
83 LoadRoot(kScratchRegister, index);
85 }
86
87 void Push(Register src);
88 void Push(Operand src);
89 void Push(Immediate value);
90 void Push(Tagged<Smi> smi);
92 Push(Immediate(static_cast<uint32_t>(index.ptr())));
93 }
95
96 enum class PushArrayOrder { kNormal, kReverse };
97 // `array` points to the first element (the lowest address).
98 // `array` and `size` are not modified.
99 void PushArray(Register array, Register size, Register scratch,
100 PushArrayOrder order = PushArrayOrder::kNormal);
101
102 // Before calling a C-function from generated code, align arguments on stack.
103 // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
104 // etc., not pushed. The argument count assumes all arguments are word sized.
105 // The number of slots reserved for arguments depends on platform. On Windows
106 // stack slots are reserved for the arguments passed in registers. On other
107 // platforms stack slots are only reserved for the arguments actually passed
108 // on the stack.
109 void PrepareCallCFunction(int num_arguments);
110
111 // Calls a C function and cleans up the space for arguments allocated
112 // by PrepareCallCFunction. The called function is not allowed to trigger a
113 // garbage collection, since that might move the code and invalidate the
114 // return address (unless this is somehow accounted for by the called
115 // function).
117 ExternalReference function, int num_arguments,
118 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
119 Label* return_location = nullptr);
121 Register function, int num_arguments,
122 SetIsolateDataSlots set_isolate_data_slots = SetIsolateDataSlots::kYes,
123 Label* return_location = nullptr);
124
125 // Calculate the number of stack slots to reserve for arguments when calling a
126 // C function.
127 static int ArgumentStackSlotsForCFunctionCall(int num_arguments);
128
130 void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
131 Label* condition_met,
132 Label::Distance condition_met_distance = Label::kFar);
133 void CheckMarkBit(Register object, Register scratch0, Register scratch1,
134 Condition cc, Label* condition_met,
135 Label::Distance condition_met_distance = Label::kFar);
136 void JumpIfMarking(Label* is_marking,
137 Label::Distance condition_met_distance = Label::kFar);
138 void JumpIfNotMarking(Label* not_marking,
139 Label::Distance condition_met_distance = Label::kFar);
140
141 // Define movq here instead of using AVX_OP. movq is defined using templates
142 // and there is a function template `void movq(P1)`, while technically
143 // impossible, will be selected when deducing the arguments for AvxHelper.
144 void Movq(XMMRegister dst, Register src);
145 void Movq(Register dst, XMMRegister src);
146
152 void Cvttsd2si(Register dst, Operand src);
156 void Cvttss2si(Register dst, Operand src);
167 void Cvttsd2uiq(Register dst, Operand src, Label* fail = nullptr);
168 void Cvttsd2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
169 void Cvttsd2ui(Register dst, Operand src, Label* fail = nullptr);
170 void Cvttsd2ui(Register dst, XMMRegister src, Label* fail = nullptr);
171 void Cvttss2uiq(Register dst, Operand src, Label* fail = nullptr);
172 void Cvttss2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
173 void Cvttss2ui(Register dst, Operand src, Label* fail = nullptr);
174 void Cvttss2ui(Register dst, XMMRegister src, Label* fail = nullptr);
177
178 // cvtsi2sd and cvtsi2ss instructions only write to the low 64/32-bit of dst
179 // register, which hinders register renaming and makes dependence chains
180 // longer. So we use xorpd to clear the dst register before cvtsi2sd for
181 // non-AVX and a scratch XMM register as first src for AVX to solve this
182 // issue.
191
194
195 void PextrdPreSse41(Register dst, XMMRegister src, uint8_t imm8);
196 void Pextrq(Register dst, XMMRegister src, int8_t imm8);
197
198 void PinsrdPreSse41(XMMRegister dst, Register src2, uint8_t imm8,
199 uint32_t* load_pc_offset = nullptr);
200 void PinsrdPreSse41(XMMRegister dst, Operand src2, uint8_t imm8,
201 uint32_t* load_pc_offset = nullptr);
202
203 void Pinsrq(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8,
204 uint32_t* load_pc_offset = nullptr);
205 void Pinsrq(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8,
206 uint32_t* load_pc_offset = nullptr);
207
208 void Lzcntq(Register dst, Register src);
209 void Lzcntq(Register dst, Operand src);
210 void Lzcntl(Register dst, Register src);
211 void Lzcntl(Register dst, Operand src);
212 void Tzcntq(Register dst, Register src);
213 void Tzcntq(Register dst, Operand src);
214 void Tzcntl(Register dst, Register src);
215 void Tzcntl(Register dst, Operand src);
216 void Popcntl(Register dst, Register src);
217 void Popcntl(Register dst, Operand src);
218 void Popcntq(Register dst, Register src);
219 void Popcntq(Register dst, Operand src);
220
221 void Cmp(Register dst, Tagged<Smi> src);
222 void Cmp(Operand dst, Tagged<Smi> src);
223 void Cmp(Register dst, int32_t src);
224
  // Compares two tagged values; with pointer compression this compares only
  // the low 32 bits (see cmp_tagged). Sets flags for the usual j(...) tests.
  void CmpTagged(const Register& src1, const Register& src2) {
    cmp_tagged(src1, src2);
  }
228
229 // SIMD256
231 YMMRegister tmp1, YMMRegister tmp2);
233 YMMRegister scratch);
235 YMMRegister scratch);
237 YMMRegister scratch);
239 YMMRegister scratch);
241 YMMRegister scratch, YMMRegister scratch2);
243 YMMRegister scratch, YMMRegister scratch2);
245 YMMRegister scratch, bool is_signed);
247 YMMRegister scratch, bool is_signed);
249 YMMRegister scratch, bool is_signed);
250#define MACRO_ASM_X64_IEXTADDPAIRWISE_LIST(V) \
251 V(I32x8ExtAddPairwiseI16x16S) \
252 V(I32x8ExtAddPairwiseI16x16U) \
253 V(I16x16ExtAddPairwiseI8x32S) \
254 V(I16x16ExtAddPairwiseI8x32U)
255
256#define DECLARE_IEXTADDPAIRWISE(ExtAddPairwiseOp) \
257 void ExtAddPairwiseOp(YMMRegister dst, YMMRegister src, YMMRegister scratch);
259#undef DECLARE_IEXTADDPAIRWISE
260#undef MACRO_ASM_X64_IEXTADDPAIRWISE_LIST
261
263 Register scratch);
265 Register scratch);
268 XMMRegister src3, YMMRegister tmp, YMMRegister tmp2);
270 XMMRegister src3, YMMRegister tmp, YMMRegister tmp2);
271
274 YMMRegister src2, YMMRegister scratch);
275
276// Splat
277#define MACRO_ASM_X64_ISPLAT_LIST(V) \
278 V(I8x32Splat, b, vmovd) \
279 V(I16x16Splat, w, vmovd) \
280 V(I32x8Splat, d, vmovd) \
281 V(I64x4Splat, q, vmovq)
282
283#define DECLARE_ISPLAT(name, suffix, instr_mov) \
284 void name(YMMRegister dst, Register src); \
285 void name(YMMRegister dst, Operand src);
286
288
289#undef DECLARE_ISPLAT
290
293
295 YMMRegister src3, YMMRegister tmp);
297 YMMRegister src3, YMMRegister tmp);
299 YMMRegister src3, YMMRegister tmp);
301 YMMRegister src3, YMMRegister tmp);
302
304 YMMRegister src2, YMMRegister src3,
305 YMMRegister scratch, YMMRegister splat_reg);
306
308 YMMRegister scratch2);
309
310 // ---------------------------------------------------------------------------
311 // Conversions between tagged smi values and non-tagged integer values.
312
313 // Tag an word-size value. The result must be known to be a valid smi value.
315 // Requires dst != src
316 void SmiTag(Register dst, Register src);
317
318 // Simple comparison of smis. Both sides must be known smis to use these,
319 // otherwise use Cmp.
320 void SmiCompare(Register smi1, Register smi2);
325
326 // Functions performing a check on a known or potential smi. Returns
327 // a condition that is satisfied if the check is successful.
330
331 // This can be used in testing to ensure we never rely on what is in the
332 // unused smi bits.
334
335 // Abort execution if argument is a smi, enabled via --debug-code.
337
338 // Abort execution if argument is not a smi, enabled via --debug-code.
341
342 // Test-and-jump functions. Typically combines a check function
343 // above with a conditional jump.
344
345 // Jump to label if the value is a tagged smi.
346 void JumpIfSmi(Register src, Label* on_smi,
347 Label::Distance near_jump = Label::kFar);
348
349 // Jump to label if the value is not a tagged smi.
350 void JumpIfNotSmi(Register src, Label* on_not_smi,
351 Label::Distance near_jump = Label::kFar);
352
353 // Jump to label if the value is not a tagged smi.
354 void JumpIfNotSmi(Operand src, Label* on_not_smi,
355 Label::Distance near_jump = Label::kFar);
356
357 // Operations on tagged smi values.
358
359 // Smis represent a subset of integers. The subset is always equivalent to
360 // a two's complement interpretation of a fixed number of bits.
361
362 // Add an integer constant to a tagged smi, giving a tagged smi as result.
363 // No overflow testing on the result is done.
364 void SmiAddConstant(Operand dst, Tagged<Smi> constant);
365
366 // Specialized operations
367
368 // Converts, if necessary, a smi to a combination of number and
369 // multiplier to be used as a scaled index.
370 // The src register contains a *positive* smi value. The shift is the
371 // power of two to multiply the index value by (e.g. to index by
372 // smi-value * kSystemPointerSize, pass the smi and kSystemPointerSizeLog2).
373 // The returned index register may be either src or dst, depending
374 // on what is most efficient. If src and dst are different registers,
375 // src is always unchanged.
376 SmiIndex SmiToIndex(Register dst, Register src, int shift);
377
  // Compares the 32-bit value in {a} against the immediate {b} and jumps to
  // {dest} when {cond} holds. Note: 32-bit compare (cmpl) — callers must
  // ensure the interesting value fits in 32 bits.
  void JumpIf(Condition cond, Register a, int32_t b, Label* dest) {
    cmpl(a, Immediate(b));
    j(cond, dest);
  }
382
  // Jumps to {dest} when the 32-bit value in {a} equals {b}.
  void JumpIfEqual(Register a, int32_t b, Label* dest) {
    JumpIf(equal, a, b, dest);
  }
386
  // Jumps to {dest} when the 32-bit value in {a} is less than {b} (signed
  // comparison).
  void JumpIfLessThan(Register a, int32_t b, Label* dest) {
    JumpIf(less, a, b, dest);
  }
390
  // Jumps to {dest} when the 32-bit value in {a} is below {b} (unsigned
  // comparison).
  void JumpIfUnsignedLessThan(Register a, int32_t b, Label* dest) {
    JumpIf(below, a, b, dest);
  }
394
395 // Caution: if {reg} is a 32-bit negative int, it should be sign-extended to
396 // 64-bit before calling this function.
397 void Switch(Register scrach, Register reg, int case_base_value,
398 Label** labels, int num_labels);
399
400#ifdef V8_MAP_PACKING
401 void UnpackMapWord(Register r);
402#endif
403
406
407 void LoadFeedbackVector(Register dst, Register closure, Label* fbv_undef,
408 Label::Distance distance);
409
410 void Move(Register dst, intptr_t x) {
411 if (x == 0) {
412 xorl(dst, dst);
413 // The following shorter sequence for uint8 causes performance
414 // regressions:
415 // xorl(dst, dst); movb(dst, Immediate(static_cast<uint32_t>(x)));
416 } else if (is_uint32(x)) {
417 movl(dst, Immediate(static_cast<uint32_t>(x)));
418 } else if (is_int32(x)) {
419 // "movq reg64, imm32" is sign extending.
420 movq(dst, Immediate(static_cast<int32_t>(x)));
421 } else {
422 movq(dst, Immediate64(x));
423 }
424 }
425 void Move(Operand dst, intptr_t x);
426 void Move(Register dst, Tagged<Smi> source);
427
428 void Move(Operand dst, Tagged<Smi> source) {
429 Register constant = GetSmiConstant(source);
430 movq(dst, constant);
431 }
432
434 Move(dst, source.ptr());
435 }
436
  // Stores the raw pointer-sized payload of the TaggedIndex {source} to
  // memory at {dst}.
  void Move(Operand dst, Tagged<TaggedIndex> source) {
    Move(dst, source.ptr());
  }
440
442
443 void Move(XMMRegister dst, uint32_t src);
444 void Move(XMMRegister dst, uint64_t src);
  // Loads the 32-bit IEEE bit pattern of {src} into {dst}.
  void Move(XMMRegister dst, float src) {
    Move(dst, base::bit_cast<uint32_t>(src));
  }
  // Loads the 64-bit IEEE bit pattern of {src} into {dst}.
  void Move(XMMRegister dst, double src) {
    Move(dst, base::bit_cast<uint64_t>(src));
  }
451 void Move(XMMRegister dst, uint64_t high, uint64_t low);
452
453 // Move if the registers are not identical.
454 void Move(Register target, Register source);
455 void Move(XMMRegister target, XMMRegister source);
456
457 void Move(Register target, Operand source);
458 void Move(Register target, Immediate source);
459
461 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
463 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
464
465 // Loads a pointer into a register with a relocation mode.
  // Loads a pointer into a register with a relocation mode.
  void Move(Register dst, Address ptr, RelocInfo::Mode rmode) {
    // This method must not be used with heap object references. The stored
    // address is not GC safe. Use the handle version instead.
    DCHECK(rmode == RelocInfo::NO_INFO || rmode > RelocInfo::LAST_GCED_ENUM);
    movq(dst, Immediate64(ptr, rmode));
  }
472
473 // Move src0 to dst0 and src1 to dst1, handling possible overlaps.
474 void MovePair(Register dst0, Register src0, Register dst1, Register src1);
475
476 // Convert smi to word-size sign-extended value.
479 // Requires dst != src
480 void SmiUntag(Register dst, Register src);
481 void SmiUntag(Register dst, Operand src);
483
484 // Convert smi to 32-bit value.
487
488 // Loads the address of the external reference into the destination
489 // register.
491
492 void LoadFromConstantsTable(Register destination, int constant_index) final;
495 void StoreRootRelative(int32_t offset, Register value) final;
496
497 // Operand pointing to an external reference.
498 // May emit code to set up the scratch register. The operand is
499 // only guaranteed to be correct as long as the scratch register
500 // isn't changed.
501 // If the operand is used more than once, use a scratch register
502 // that is guaranteed not to be clobbered.
504 Register scratch = kScratchRegister);
505
507 return ExternalReferenceAsOperand(ExternalReference::Create(id), no_reg);
508 }
509
  // Indirect call through the address held in {reg}.
  void Call(Register reg) { call(reg); }
511 void Call(Operand op);
512 void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
513 void Call(Address destination, RelocInfo::Mode rmode);
  // Direct call to the code bound (or to be bound) at {target}.
  void Call(Label* target) { call(target); }
516
519 void CallBuiltinByIndex(Register builtin_index);
520 void CallBuiltin(Builtin builtin);
523
524 // Load the code entry point from the Code object.
529 JumpMode jump_mode = JumpMode::kJump);
530
531 // Convenience functions to call/jmp to the code of a JSFunction object.
532 // TODO(42204201): These don't work properly with leaptiering as we need to
533 // validate the parameter count at runtime. Instead, we should replace them
534 // with CallJSDispatchEntry that generates a call to a given (compile-time
535 // constant) JSDispatchHandle.
536 void CallJSFunction(Register function_object, uint16_t argument_count);
537 void JumpJSFunction(Register function_object,
538 JumpMode jump_mode = JumpMode::kJump);
539#ifdef V8_ENABLE_LEAPTIERING
540 void CallJSDispatchEntry(JSDispatchHandle dispatch_handle,
541 uint16_t argument_count);
542#endif
543#ifdef V8_ENABLE_WEBASSEMBLY
544 void CallWasmCodePointer(Register target, uint64_t signature_hash,
545 CallJumpMode call_jump_mode = CallJumpMode::kCall);
546 void CallWasmCodePointerNoSignatureCheck(Register target);
547 void LoadWasmCodePointer(Register dst, Operand src);
548#endif
549
550 void Jump(Address destination, RelocInfo::Mode rmode);
551 void Jump(Address destination, RelocInfo::Mode rmode, Condition cc);
552 void Jump(const ExternalReference& reference);
553 void Jump(Operand op);
554 void Jump(Operand op, Condition cc);
555 void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);
556 void Jump(Handle<Code> code_object, RelocInfo::Mode rmode, Condition cc);
557
558 // TODO(olivf, 42204201) Rename this to AssertNotDeoptimized once
559 // non-leaptiering is removed from the codebase.
561 void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
563 Label* jump_deoptimization_entry_label);
564
565 void Trap();
567
571 void CompareRoot(Operand with, RootIndex index);
572
573 // Generates function and stub prologue code.
575 void Prologue();
576
577 // Helpers for argument handling
580 Register scratch);
582 Register scratch);
583
584 // Calls Abort(msg) if the condition cc is not satisfied.
585 // Use --debug_code to enable.
587
588 // Like Assert(), but without condition.
589 // Use --debug_code to enable.
591
592 // Abort execution if a 64 bit register containing a 32 bit payload does not
593 // have zeros in the top 32 bits, enabled via --debug-code.
595
596 // Abort execution if the signed bit of smi register with pointer compression
597 // is not zero, enabled via --debug-code.
599
600 // Like Assert(), but always enabled.
601 void Check(Condition cc, AbortReason reason);
602
603 // Same as Check() but expresses that the check is needed for the sandbox.
605
606 // Compare instance type for map.
607 // Always use unsigned comparisons: above and below, not less and greater.
609
610 // Abort execution if argument is not a Map, enabled via
611 // --debug-code.
613
614 // Abort execution if argument is not a Code, enabled via
615 // --debug-code.
617
618 // Abort execution if argument is not smi nor in the main pointer compression
619 // cage, enabled via --debug-code.
622
623 // Print a message to stdout and abort execution.
625
627
629
630 // Activation support.
  // Platform-independent overload; the constant-pool flag is meaningless on
  // x64, so this must never be called.
  void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
    // Out-of-line constant pool not implemented on x64.
    UNREACHABLE();
  }
637
638// Allocate stack space of given size (i.e. decrement {rsp} by the value
639// stored in the given register, or by a constant). If you need to perform a
640// stack check, do it before calling this function because this function may
641// write into the newly allocated space. It may also overwrite the given
642// register's value, in the version that takes a register.
643#if defined(V8_TARGET_OS_WIN) || defined(V8_TARGET_OS_MACOS)
644 void AllocateStackSpace(Register bytes_scratch);
645 void AllocateStackSpace(int bytes);
646#else
  // Decrements rsp by the byte count held in {bytes}. (Non-Win/Mac path: no
  // stack-probe pages need touching, so a plain subtraction suffices.)
  void AllocateStackSpace(Register bytes) { subq(rsp, bytes); }
648 void AllocateStackSpace(int bytes) {
649 DCHECK_GE(bytes, 0);
650 if (bytes == 0) return;
651 subq(rsp, Immediate(bytes));
652 }
653#endif
654
656 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
657 Move(kRootRegister, isolate_root);
658#ifdef V8_COMPRESS_POINTERS
659 LoadRootRelative(kPtrComprCageBaseRegister,
660 IsolateData::cage_base_offset());
661#endif
662 }
663
664 void CallEphemeronKeyBarrier(Register object, Register slot_address,
665 SaveFPRegsMode fp_mode);
666
668 SaveFPRegsMode fp_mode,
670
672 Register object, Register slot_address, SaveFPRegsMode fp_mode,
673 StubCallMode mode = StubCallMode::kCallBuiltinPointer);
675 Register object, Register slot_address, SaveFPRegsMode fp_mode,
676 StubCallMode mode = StubCallMode::kCallBuiltinPointer);
677
678#ifdef V8_IS_TSAN
679 void CallTSANStoreStub(Register address, Register value,
680 SaveFPRegsMode fp_mode, int size, StubCallMode mode,
681 std::memory_order order);
682 void CallTSANRelaxedLoadStub(Register address, SaveFPRegsMode fp_mode,
683 int size, StubCallMode mode);
684#endif // V8_IS_TSAN
685
686 void MoveNumber(Register dst, double value);
687 void MoveNonSmi(Register dst, double value);
688
689 // Calculate how much stack space (in bytes) are required to store caller
690 // registers excluding those specified in the arguments.
692 Register exclusion = no_reg) const;
693
694 // PushCallerSaved and PopCallerSaved do not arrange the registers in any
695 // particular order so they are not useful for calls that can cause a GC.
696 // The caller can exclude a register that does not need to be saved and
697 // restored.
698
699 // Push caller saved registers on the stack, and return the number of bytes
700 // stack pointer is adjusted.
701 int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion = no_reg);
702 // Restore caller saved registers from the stack, and return the number of
703 // bytes stack pointer is adjusted.
704 int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion = no_reg);
705
708
710 int stack_slot_size = kStackSavedSavedFPSize);
712 int stack_slot_size = kStackSavedSavedFPSize);
713
714 // Compute the start of the generated instruction stream from the current PC.
715 // This is an alternative to embedding the {CodeObject} handle as a reference.
717
718 // Control-flow integrity:
719
720 // Define a function entrypoint which will emit a landing pad instruction if
721 // required by the build config.
722 void CodeEntry();
723 // Define an exception handler.
725 // Define an exception handler and bind a label.
  // Binds {label} as a valid (landing-pad-marked) jump target for an
  // exception handler.
  void BindExceptionHandler(Label* label) { BindJumpTarget(label); }
727 // Bind a jump target and mark it as a valid code entry.
729 bind(label);
730 CodeEntry();
731 }
732
733 // ---------------------------------------------------------------------------
734 // Pointer compression support
735
736 // Loads a field containing any tagged value and decompresses it if necessary.
738
739 // Loads a field containing any tagged value but does not decompress it when
740 // pointer compression is enabled.
743 Operand field_operand);
744
745 // Loads a field containing a Smi and decompresses it if pointer compression
746 // is enabled.
748
749 // Loads a field containing any tagged value, decompresses it if necessary and
750 // pushes the full pointer to the stack. When pointer compression is enabled,
751 // uses |scratch| to decompress the value.
752 void PushTaggedField(Operand field_operand, Register scratch);
753
754 // Loads a field containing smi value and untags it.
757
758 // Compresses tagged value if necessary and stores it to given on-heap
759 // location.
760 void StoreTaggedField(Operand dst_field_operand, Immediate immediate);
761 void StoreTaggedField(Operand dst_field_operand, Register value);
762 void StoreTaggedSignedField(Operand dst_field_operand, Tagged<Smi> value);
763 void AtomicStoreTaggedField(Operand dst_field_operand, Register value);
764
765 // The following macros work even when pointer compression is not enabled.
771
772 // ---------------------------------------------------------------------------
773 // V8 Sandbox support
774
775 // Transform a SandboxedPointer from/to its encoded form, which is used when
776 // the pointer is stored on the heap and ensures that the pointer will always
777 // point into the sandbox.
780
781 // Load and decode a SandboxedPointer from the heap.
783 // Encode and store a SandboxedPointer to the heap.
784 void StoreSandboxedPointerField(Operand dst_field_operand, Register value);
785
786 enum class IsolateRootLocation { kInScratchRegister, kInRootRegister };
787 // Loads a field containing off-heap pointer and does necessary decoding
788 // if sandboxed external pointers are enabled.
790 ExternalPointerTagRange tag_range,
791 Register scratch,
792 IsolateRootLocation isolateRootLocation =
793 IsolateRootLocation::kInRootRegister);
794
795 // Load a trusted pointer field.
796 // When the sandbox is enabled, these are indirect pointers using the trusted
797 // pointer table. Otherwise they are regular tagged fields.
799 IndirectPointerTag tag, Register scratch);
800 // Store a trusted pointer field.
801 void StoreTrustedPointerField(Operand dst_field_operand, Register value);
802
803 // Load a code pointer field.
804 // These are special versions of trusted pointers that, when the sandbox is
805 // enabled, reference code objects through the code pointer table.
807 Register scratch) {
808 LoadTrustedPointerField(destination, field_operand, kCodeIndirectPointerTag,
809 scratch);
810 }
811 // Store a code pointer field.
  // Store a code pointer field. Code pointers are trusted pointers, so this
  // simply forwards to the trusted-pointer store.
  void StoreCodePointerField(Operand dst_field_operand, Register value) {
    StoreTrustedPointerField(dst_field_operand, value);
  }
815
816 // Load an indirect pointer field.
817 // Only available when the sandbox is enabled, but always visible to avoid
818 // having to place the #ifdefs into the caller.
820 IndirectPointerTag tag, Register scratch);
821
822 // Store an indirect pointer field.
823 // Only available when the sandbox is enabled, but always visible to avoid
824 // having to place the #ifdefs into the caller.
825 void StoreIndirectPointerField(Operand dst_field_operand, Register value);
826
827#ifdef V8_ENABLE_SANDBOX
828 // Retrieve the heap object referenced by the given indirect pointer handle,
829 // which can either be a trusted pointer handle or a code pointer handle.
830 void ResolveIndirectPointerHandle(Register destination, Register handle,
832
833 // Retrieve the heap object referenced by the given trusted pointer handle.
834 void ResolveTrustedPointerHandle(Register destination, Register handle,
836
837 // Retrieve the Code object referenced by the given code pointer handle.
838 void ResolveCodePointerHandle(Register destination, Register handle);
839
840 // Load the pointer to a Code's entrypoint via a code pointer.
841 // Only available when the sandbox is enabled as it requires the code pointer
842 // table.
843 void LoadCodeEntrypointViaCodePointer(Register destination,
844 Operand field_operand,
846
847 // Load the value of Code pointer table corresponding to
848 // IsolateGroup::current()->code_pointer_table_.
849 // Only available when the sandbox is enabled.
850 void LoadCodePointerTableBase(Register destination);
851#endif // V8_ENABLE_SANDBOX
852
853#ifdef V8_ENABLE_LEAPTIERING
854 void LoadEntrypointFromJSDispatchTable(Register destination,
855 Register dispatch_handle);
856 void LoadEntrypointFromJSDispatchTable(Register destination,
857 JSDispatchHandle dispatch_handle);
858 void LoadParameterCountFromJSDispatchTable(Register destination,
859 Register dispatch_handle);
860 void LoadEntrypointAndParameterCountFromJSDispatchTable(
861 Register entrypoint, Register parameter_count, Register dispatch_handle);
862#endif // V8_ENABLE_LEAPTIERING
863
865
866 // Loads and stores the value of an external reference.
867 // Special case code for load and store to take advantage of
868 // load_rax/store_rax if possible/necessary.
869 // For other operations, just use:
870 // Operand operand = ExternalReferenceAsOperand(extref);
871 // operation(operand, ..);
874
875 // Pushes the address of the external reference onto the stack.
877
878 // Operations on roots in the root-array.
879 // Load a root value where the index (or part of it) is variable.
880 // The variable_offset register is added to the fixed_offset value
881 // to get the index into the root-array.
882 void PushRoot(RootIndex index);
883
884 // Compare the object in a register to a value and jump if they are equal.
  // Jumps to {if_equal} when the value in {with} equals the root at {index}.
  void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
                  Label::Distance if_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(equal, if_equal, if_equal_distance);
  }
  // Memory-operand variant: jumps to {if_equal} when the value at {with}
  // equals the root at {index}.
  void JumpIfRoot(Operand with, RootIndex index, Label* if_equal,
                  Label::Distance if_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(equal, if_equal, if_equal_distance);
  }
895
896 // Compare the object in a register to a value and jump if they are not equal.
  // Jumps to {if_not_equal} when the value in {with} differs from the root
  // at {index}.
  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
                     Label::Distance if_not_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(not_equal, if_not_equal, if_not_equal_distance);
  }
  // Memory-operand variant: jumps to {if_not_equal} when the value at {with}
  // differs from the root at {index}.
  void JumpIfNotRoot(Operand with, RootIndex index, Label* if_not_equal,
                     Label::Distance if_not_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(not_equal, if_not_equal, if_not_equal_distance);
  }
907
908 // ---------------------------------------------------------------------------
909 // GC Support
910
911 // Notify the garbage collector that we wrote a pointer into an object.
912 // |object| is the object being stored into, |value| is the object being
913 // stored. value and scratch registers are clobbered by the operation.
914 // The offset is the offset from the start of the object, not the offset from
915 // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
917 Register object, int offset, Register value, Register slot_address,
918 SaveFPRegsMode save_fp, SmiCheck smi_check = SmiCheck::kInline,
920 SlotDescriptor slot = SlotDescriptor::ForDirectPointerSlot());
921
922 // For page containing |object| mark region covering |address|
923 // dirty. |object| is the object being stored into, |value| is the
924 // object being stored. The address and value registers are clobbered by the
925 // operation. RecordWrite filters out smis so it does not update
926 // the write barrier if the value is a smi.
928 Register object, Register slot_address, Register value,
929 SaveFPRegsMode save_fp, SmiCheck smi_check = SmiCheck::kInline,
931 SlotDescriptor slot = SlotDescriptor::ForDirectPointerSlot());
932
933 // Allocates an EXIT/BUILTIN_EXIT/API_CALLBACK_EXIT frame with given number
934 // of slots in non-GCed area.
935 void EnterExitFrame(int extra_slots, StackFrame::Type frame_type,
936 Register c_function);
938
939 // ---------------------------------------------------------------------------
940 // JavaScript invokes
941
942 // The way we invoke JSFunctions differs depending on whether leaptiering is
943 // enabled. As such, these functions exist in two variants. In the future,
944 // leaptiering will be used on all platforms. At that point, the
945 // non-leaptiering variants will disappear.
946
947#ifdef V8_ENABLE_LEAPTIERING
948 // Invoke the JavaScript function code by either calling or jumping.
949 void InvokeFunctionCode(Register function, Register new_target,
950 Register actual_parameter_count, InvokeType type,
951 ArgumentAdaptionMode argument_adaption_mode =
953
954 // Invoke the JavaScript function in the given register. Changes the
955 // current context to the context in the function before invoking.
956 void InvokeFunction(Register function, Register new_target,
957 Register actual_parameter_count, InvokeType type,
958 ArgumentAdaptionMode argument_adaption_mode =
960#else
961 // Invoke the JavaScript function code by either calling or jumping.
963 Register expected_parameter_count,
964 Register actual_parameter_count, InvokeType type);
965
966 // Invoke the JavaScript function in the given register. Changes the
967 // current context to the context in the function before invoking.
969 Register actual_parameter_count, InvokeType type);
970
972 Register expected_parameter_count,
973 Register actual_parameter_count, InvokeType type);
974#endif // V8_ENABLE_LEAPTIERING
975
976 // On function call, call into the debugger.
979 Register expected_parameter_count_or_dispatch_handle,
980 Register actual_parameter_count);
981
982 // ---------------------------------------------------------------------------
983 // Macro instructions.
984
985 void Cmp(Register dst, Handle<Object> source);
986 void Cmp(Operand dst, Handle<Object> source);
987
988 // Checks if value is in range [lower_limit, higher_limit] using a single
989 // comparison. Flags CF=1 or ZF=1 indicate the value is in the range
990 // (condition below_equal).
991 void CompareRange(Register value, unsigned lower_limit,
992 unsigned higher_limit);
993 void JumpIfIsInRange(Register value, unsigned lower_limit,
994 unsigned higher_limit, Label* on_in_range,
995 Label::Distance near_jump = Label::kFar);
996
997 // Emit code to discard a non-negative number of pointer-sized elements
998 // from the stack, clobbering only the rsp register.
999 void Drop(int stack_elements);
1000 // Emit code to discard a positive number of pointer-sized elements
1001 // from the stack under the return address which remains on the top,
1002 // clobbering the rsp register.
1003 void DropUnderReturnAddress(int stack_elements,
1004 Register scratch = kScratchRegister);
1006 void PushImm32(int32_t imm32);
1007 void Pop(Register dst);
1008 void Pop(Operand dst);
1009 void PopQuad(Operand dst);
1010
1011 // Compare object type for heap object.
1012 // Always use unsigned comparisons: above and below, not less and greater.
1013 // Incoming register is heap_object and outgoing register is map.
1014 // They may be the same register, and may be kScratchRegister.
1015 void CmpObjectType(Register heap_object, InstanceType type, Register map);
1016 // Variant of the above, which only guarantees to set the correct
1017 // equal/not_equal flag. Map might not be loaded.
1018 void IsObjectType(Register heap_object, InstanceType type, Register scratch);
1019 // Variant of the above, which compares against a type range rather than a
1020 // single type (lower_limit and higher_limit are inclusive).
1021 //
1022 // Always use unsigned comparisons: below for a positive result.
1024 InstanceType high, Register scratch);
1025#if V8_STATIC_ROOTS_BOOL
1026 // Fast variant which is guaranteed to not actually load the instance type
1027 // from the map.
1028 void IsObjectTypeFast(Register heap_object, InstanceType type,
1029 Register compressed_map_scratch);
1030 void CompareInstanceTypeWithUniqueCompressedMap(Register map,
1031 InstanceType type);
1032#endif // V8_STATIC_ROOTS_BOOL
1033
1034 // Fast check if the object is a js receiver type. Assumes only primitive
1035 // objects or js receivers are passed.
1037 Register heap_object, Register scratch, Label* target,
1038 Label::Distance distance = Label::kFar,
1039 Condition condition = Condition::kUnsignedGreaterThanEqual);
1040 void JumpIfJSAnyIsPrimitive(Register heap_object, Register scratch,
1041 Label* target,
1042 Label::Distance distance = Label::kFar) {
1043 return JumpIfJSAnyIsNotPrimitive(heap_object, scratch, target, distance,
1044 Condition::kUnsignedLessThan);
1045 }
1046
1047 // Compare instance type ranges for a map (low and high inclusive)
1048 // Always use unsigned comparisons: below_equal for a positive result.
1049 void CmpInstanceTypeRange(Register map, Register instance_type_out,
1050 InstanceType low, InstanceType high);
1051
1052 template <typename Field>
1054 static const int shift = Field::kShift;
1055 static const int mask = Field::kMask >> Field::kShift;
1056 if (shift != 0) {
1057 shrq(reg, Immediate(shift));
1058 }
1059 andq(reg, Immediate(mask));
1060 }
1061
1065
1066 // Tiering support.
1072 Register closure, Register scratch1,
1073 Register slot_address);
1075 JumpMode jump_mode = JumpMode::kJump);
1076#ifndef V8_ENABLE_LEAPTIERING
1078 CodeKind current_code_kind);
1080 Register feedback_vector, CodeKind current_code_kind,
1081 Label* flags_need_processing);
1083 Register closure,
1084 JumpMode jump_mode);
1085 // For compatibility with other archs.
1087 Register feedback_vector) {
1088 OptimizeCodeOrTailCallOptimizedCodeSlot(
1089 feedback_vector, kJSFunctionRegister, JumpMode::kJump);
1090 }
1091#endif // !V8_ENABLE_LEAPTIERING
1092
1093 // Abort execution if argument is not a Constructor, enabled via --debug-code.
1095
1096 // Abort execution if argument is not a JSFunction, enabled via --debug-code.
1098
1099 // Abort execution if argument is not a callable JSFunction, enabled via
1100 // --debug-code.
1102
1103 // Abort execution if argument is not a JSBoundFunction,
1104 // enabled via --debug-code.
1106
1107 // Abort execution if argument is not a JSGeneratorObject (or subclass),
1108 // enabled via --debug-code.
1110
1111 // Abort execution if argument is not undefined or an AllocationSite, enabled
1112 // via --debug-code.
1114
1115 void AssertJSAny(Register object, Register map_tmp,
1116 AbortReason abort_reason) NOOP_UNLESS_DEBUG_CODE;
1117
1118 // ---------------------------------------------------------------------------
1119 // Exception handling
1120
1121 // Push a new stack handler and link it into stack handler chain.
1123
1124 // Unlink the stack handler on top of the stack from the stack handler chain.
1126
1127 // ---------------------------------------------------------------------------
1128 // Support functions.
1129
1130 // Load the global proxy from the current context.
1132 LoadNativeContextSlot(dst, Context::GLOBAL_PROXY_INDEX);
1133 }
1134
 1135 // Load the native context slot with the given index.
1136 void LoadNativeContextSlot(Register dst, int index);
1137
1138 // Falls through and sets scratch_and_result to 0 on failure, jumps to
1139 // on_result on success.
1140 void TryLoadOptimizedOsrCode(Register scratch_and_result,
1141 CodeKind min_opt_level, Register feedback_vector,
1142 FeedbackSlot slot, Label* on_result,
1143 Label::Distance distance);
1144
1145 // ---------------------------------------------------------------------------
1146 // Runtime calls
1147
1148 // Call a runtime routine.
1149 void CallRuntime(const Runtime::Function* f, int num_arguments);
1150
1151 // Convenience function: Same as above, but takes the fid instead.
1153 const Runtime::Function* function = Runtime::FunctionForId(fid);
1154 CallRuntime(function, function->nargs);
1155 }
1156
1157 // Convenience function: Same as above, but takes the fid instead.
1158 void CallRuntime(Runtime::FunctionId fid, int num_arguments) {
1159 CallRuntime(Runtime::FunctionForId(fid), num_arguments);
1160 }
1161
1162 // Convenience function: tail call a runtime routine (jump)
1164
 1165 // Jump to a runtime routine.
1167 bool builtin_exit_frame = false);
1168
1169 // ---------------------------------------------------------------------------
1170 // StatsCounter support
1171 void IncrementCounter(StatsCounter* counter, int value) {
1172 if (!v8_flags.native_code_counters) return;
1173 EmitIncrementCounter(counter, value);
1174 }
1175 void EmitIncrementCounter(StatsCounter* counter, int value);
1176 void DecrementCounter(StatsCounter* counter, int value) {
1177 if (!v8_flags.native_code_counters) return;
1178 EmitDecrementCounter(counter, value);
1179 }
1180 void EmitDecrementCounter(StatsCounter* counter, int value);
1181
1182 // ---------------------------------------------------------------------------
1183 // Stack limit utilities
1186 Register num_args, Label* stack_overflow,
1187 Label::Distance stack_overflow_distance = Label::kFar);
1188
1189 // ---------------------------------------------------------------------------
1190 // In-place weak references.
1191 void LoadWeakValue(Register in_out, Label* target_if_cleared);
1192
1193 protected:
1194 static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
1195
1196 // Returns a register holding the smi value. The register MUST NOT be
1197 // modified. It may be the "smi 1 constant" register.
1199
1200 // Drops arguments assuming that the return address was already popped.
1202
1203 private:
1204 // Helper functions for generating invokes.
1205 void InvokePrologue(Register expected_parameter_count,
1206 Register actual_parameter_count, InvokeType type);
1207
1209};
1210
1211// -----------------------------------------------------------------------------
1212// Static helper functions.
1213
1214// Generate an Operand for loading a field from an object.
1215inline Operand FieldOperand(Register object, int offset) {
1216 return Operand(object, offset - kHeapObjectTag);
1217}
1218
1219// For compatibility with platform-independent code.
1220inline MemOperand FieldMemOperand(Register object, int offset) {
1221 return MemOperand(object, offset - kHeapObjectTag);
1222}
1223
1224// Generate an Operand for loading a field from an object. Object pointer is a
1225// compressed pointer when pointer compression is enabled.
1228 return Operand(kPtrComprCageBaseRegister, object.reg(),
1229 ScaleFactor::times_1, offset - kHeapObjectTag);
1230 } else {
1231 return Operand(object.reg(), offset - kHeapObjectTag);
1232 }
1233}
1234
1235// Generate an Operand for loading an indexed field from an object.
1236inline Operand FieldOperand(Register object, Register index, ScaleFactor scale,
1237 int offset) {
1238 return Operand(object, index, scale, offset - kHeapObjectTag);
1239}
1240
1241// Provides access to exit frame stack space (not GC-ed).
1242inline Operand ExitFrameStackSlotOperand(int offset) {
1243#ifdef V8_TARGET_OS_WIN
1244 return Operand(rsp, offset + kWindowsHomeStackSlots * kSystemPointerSize);
1245#else
1246 return Operand(rsp, offset);
1247#endif
1248}
1249
1250// Provides access to exit frame parameters (GC-ed).
1251inline Operand ExitFrameCallerStackSlotOperand(int index) {
1252 return Operand(rbp,
1255}
1256
1258 // Whether a move in the cycle needs the scratch or double scratch register.
1261};
1262
1263// Calls an API function. Allocates HandleScope, extracts returned value
1264// from handle and propagates exceptions. Clobbers C argument registers
1265// and C caller-saved registers. Restores context. On return removes
1266// (*argc_operand + slots_to_drop_on_return) * kSystemPointerSize
1267// (GCed, includes the call JS arguments space and the additional space
1268// allocated for the fast call).
1269void CallApiFunctionAndReturn(MacroAssembler* masm, bool with_profiling,
1270 Register function_address,
1271 ExternalReference thunk_ref, Register thunk_arg,
1272 int slots_to_drop_on_return,
1273 MemOperand* argc_operand,
1274 MemOperand return_value_operand);
1275
1276#define ACCESS_MASM(masm) masm->
1277
1278} // namespace internal
1279} // namespace v8
1280
1281#endif // V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
int16_t parameter_count
Definition builtins.cc:67
interpreter::OperandScale scale
Definition builtins.cc:44
Builtins::Kind kind
Definition builtins.cc:40
static constexpr int kFixedSlotCountAboveFp
void LoadTaggedSignedField(Register destination, Operand field_operand)
void JumpIfJSAnyIsPrimitive(Register heap_object, Register scratch, Label *target, Label::Distance distance=Label::kFar)
void Cvttsd2ui(Register dst, Operand src, Label *fail=nullptr)
void Move(XMMRegister dst, float src)
void PextrdPreSse41(Register dst, XMMRegister src, uint8_t imm8)
void LoadTaggedField(Register destination, Operand field_operand)
void Cvttsd2si(Register dst, XMMRegister src)
void EncodeSandboxedPointer(Register value)
void Cvttss2si(Register dst, XMMRegister src)
void Cvtqui2sd(XMMRegister dst, Register src)
void Move(Register target, Register source)
void Push(Immediate value)
void S256Not(YMMRegister dst, YMMRegister src, YMMRegister scratch)
void Cvtlui2sd(XMMRegister dst, Register src)
void Abort(AbortReason msg)
void Cvttsd2uiq(Register dst, XMMRegister src, Label *fail=nullptr)
static int ArgumentStackSlotsForCFunctionCall(int num_arguments)
void JumpIf(Condition cond, Register a, int32_t b, Label *dest)
void Move(XMMRegister dst, uint32_t src)
void PinsrdPreSse41(XMMRegister dst, Operand src2, uint8_t imm8, uint32_t *load_pc_offset=nullptr)
void Push(Tagged< TaggedIndex > index)
void IncrementCounter(StatsCounter *counter, int value)
void CallJSFunction(Register function_object, uint16_t argument_count)
void Cvttss2siq(Register dst, XMMRegister src)
void JumpIfRoot(Register with, RootIndex index, Label *if_equal, Label::Distance if_equal_distance=Label::kFar)
void Cvttsd2siq(Register dst, XMMRegister src)
void Cmp(Register dst, Handle< Object > source)
void Cvttsd2uiq(Register dst, Operand src, Label *fail=nullptr)
void Cvttss2uiq(Register dst, Operand src, Label *fail=nullptr)
void F32x8Max(YMMRegister dst, YMMRegister lhs, YMMRegister rhs, YMMRegister scratch)
void LoadAddress(Register destination, ExternalReference source)
void StoreTrustedPointerField(Operand dst_field_operand, Register value)
void PushAddress(ExternalReference source)
void IsObjectType(Register heap_object, InstanceType type, Register scratch)
void AssertMap(Register object) NOOP_UNLESS_DEBUG_CODE
void Jump(Handle< Code > code_object, RelocInfo::Mode rmode, Condition cc)
void Popcntq(Register dst, Register src)
void AtomicStoreTaggedField(Operand dst_field_operand, Register value)
void Lzcntq(Register dst, Operand src)
int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion=no_reg)
void Pextrq(Register dst, XMMRegister src, int8_t imm8)
void CallRuntime(Runtime::FunctionId fid)
void F32x8Qfms(YMMRegister dst, YMMRegister src1, YMMRegister src2, YMMRegister src3, YMMRegister tmp)
void EnterFrame(StackFrame::Type type)
void Cvtlsi2ss(XMMRegister dst, Operand src)
void ClobberDecompressedSmiBits(Register smi)
void Popcntl(Register dst, Operand src)
void Move(Operand dst, Tagged< Smi > source)
void JumpIfUnsignedLessThan(Register a, int32_t b, Label *dest)
void AssertFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void SmiUntagField(Register dst, Operand src)
void CallIndirectPointerBarrier(Register object, Register slot_address, SaveFPRegsMode fp_mode, IndirectPointerTag tag)
void Cvtlsi2sd(XMMRegister dst, Operand src)
void SmiUntagUnsigned(Register reg)
void AssertGeneratorObject(Register object) NOOP_UNLESS_DEBUG_CODE
void DecompressTaggedSigned(Register destination, Operand field_operand)
void LoadProtectedPointerField(Register destination, Operand field_operand)
void EmitDecrementCounter(StatsCounter *counter, int value)
void Move(Operand dst, Handle< HeapObject > source, RelocInfo::Mode rmode=RelocInfo::FULL_EMBEDDED_OBJECT)
void Cvttsd2ui(Register dst, XMMRegister src, Label *fail=nullptr)
void LoadIndirectPointerField(Register destination, Operand field_operand, IndirectPointerTag tag, Register scratch)
void I16x16ExtMul(YMMRegister dst, XMMRegister src1, XMMRegister src2, YMMRegister scratch, bool is_signed)
int CallCFunction(ExternalReference function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_location=nullptr)
void Cmpeqss(XMMRegister dst, XMMRegister src)
void Lzcntl(Register dst, Operand src)
void Store(ExternalReference destination, Register source)
void StoreIndirectPointerField(Operand dst_field_operand, Register value)
void InvokeFunction(Register function, Register new_target, Register actual_parameter_count, InvokeType type)
void Call(Handle< Code > code_object, RelocInfo::Mode rmode)
void Assert(Condition cc, AbortReason reason) NOOP_UNLESS_DEBUG_CODE
void JumpIfEqual(Register a, int32_t b, Label *dest)
void InvokeFunction(Register function, Register new_target, Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void Cvtqsi2sd(XMMRegister dst, Register src)
void JumpIfNotSmi(Register src, Label *on_not_smi, Label::Distance near_jump=Label::kFar)
void Cvttsd2si(Register dst, Operand src)
void RecordWriteField(Register object, int offset, Register value, Register slot_address, SaveFPRegsMode save_fp, SmiCheck smi_check=SmiCheck::kInline, ReadOnlyCheck ro_check=ReadOnlyCheck::kInline, SlotDescriptor slot=SlotDescriptor::ForDirectPointerSlot())
void Jump(Address destination, RelocInfo::Mode rmode)
void DropArguments(Register count, Register scratch)
void F16x8Max(YMMRegister dst, XMMRegister lhs, XMMRegister rhs, YMMRegister scratch, YMMRegister scratch2)
void AssertFeedbackVector(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void Movq(Register dst, XMMRegister src)
void EnterExitFrame(int extra_slots, StackFrame::Type frame_type, Register c_function)
void F64x4Max(YMMRegister dst, YMMRegister lhs, YMMRegister rhs, YMMRegister scratch)
void I32x8DotI8x32I7x32AddS(YMMRegister dst, YMMRegister src1, YMMRegister src2, YMMRegister src3, YMMRegister scratch, YMMRegister splat_reg)
void LoadRootRelative(Register destination, int32_t offset) final
void JumpIfNotRoot(Register with, RootIndex index, Label *if_not_equal, Label::Distance if_not_equal_distance=Label::kFar)
void Cvttss2si(Register dst, Operand src)
void Cvtsd2ss(XMMRegister dst, Operand src)
void CmpInstanceTypeRange(Register map, Register instance_type_out, InstanceType low, InstanceType high)
void StoreCodePointerField(Operand dst_field_operand, Register value)
void CallEphemeronKeyBarrier(Register object, Register slot_address, SaveFPRegsMode fp_mode)
void StoreTaggedField(Operand dst_field_operand, Immediate immediate)
void CmpTagged(const Register &src1, const Register &src2)
void AssertUnreachable(AbortReason reason) NOOP_UNLESS_DEBUG_CODE
void LoadCompressedMap(Register destination, Register object)
void DropArgumentsAndPushNewReceiver(Register argc, Operand receiver, Register scratch)
void LoadRootRegisterOffset(Register destination, intptr_t offset) final
void EmitIncrementCounter(StatsCounter *counter, int value)
void PopQuad(Operand dst)
void Cvtlsi2ss(XMMRegister dst, Register src)
void Cvtqui2ss(XMMRegister dst, Register src)
void AssertSmi(Register object) NOOP_UNLESS_DEBUG_CODE
void Cvtph2pd(XMMRegister dst, XMMRegister src)
void Cvtqsi2ss(XMMRegister dst, Register src)
void LoadWeakValue(Register in_out, Label *target_if_cleared)
void Check(Condition cc, AbortReason reason)
void F16x8Qfma(YMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, YMMRegister tmp, YMMRegister tmp2)
void CheckPageFlag(Register object, Register scratch, int mask, Condition cc, Label *condition_met, Label::Distance condition_met_distance=Label::kFar)
void SmiUntag(Register dst, Register src)
void CompareRoot(Register with, RootIndex index, ComparisonMode mode=ComparisonMode::kDefault)
void PushTaggedField(Operand field_operand, Register scratch)
void SmiCompare(Operand dst, Register src)
void Cmp(Register dst, Tagged< Smi > src)
void Move(Register dst, Tagged< TaggedIndex > source)
void Cmp(Operand dst, Handle< Object > source)
int CallCFunction(Register function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_location=nullptr)
void AssertCode(Register object) NOOP_UNLESS_DEBUG_CODE
void TailCallBuiltin(Builtin builtin)
int PopAll(DoubleRegList registers, int stack_slot_size=kStackSavedSavedFPSize)
void Move(XMMRegister dst, uint64_t high, uint64_t low)
void Movq(XMMRegister dst, Register src)
void LoadSandboxedPointerField(Register destination, Operand field_operand)
void InvokeFunctionCode(Register function, Register new_target, Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void TailCallBuiltin(Builtin builtin, Condition cc)
void Load(Register destination, ExternalReference source)
void DecodeSandboxedPointer(Register value)
void Move(Operand dst, Tagged< TaggedIndex > source)
void JumpIfNotRoot(Operand with, RootIndex index, Label *if_not_equal, Label::Distance if_not_equal_distance=Label::kFar)
void Jump(const ExternalReference &reference)
void CompareTaggedRoot(Register with, RootIndex index)
void Cvtqsi2sd(XMMRegister dst, Operand src)
void JumpIfJSAnyIsNotPrimitive(Register heap_object, Register scratch, Label *target, Label::Distance distance=Label::kFar, Condition condition=Condition::kUnsignedGreaterThanEqual)
void Cvtlui2ss(XMMRegister dst, Operand src)
void LoadCodeInstructionStart(Register destination, Register code_object, CodeEntrypointTag tag)
void Jump(Address destination, RelocInfo::Mode rmode, Condition cc)
void Cvttss2ui(Register dst, Operand src, Label *fail=nullptr)
void InvokePrologue(Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
Operand EntryFromBuiltinIndexAsOperand(Register builtin_index)
void SmiToInt32(Register reg)
void Cvtqui2sd(XMMRegister dst, Operand src)
void SbxCheck(Condition cc, AbortReason reason)
void SmiCompare(Operand dst, Tagged< Smi > src)
void ReplaceClosureCodeWithOptimizedCode(Register optimized_code, Register closure, Register scratch1, Register slot_address)
void PushArray(Register array, Register size, Register scratch, PushArrayOrder order=PushArrayOrder::kNormal)
void AssertZeroExtended(Register reg) NOOP_UNLESS_DEBUG_CODE
SmiIndex SmiToIndex(Register dst, Register src, int shift)
void Tzcntq(Register dst, Register src)
void GenerateTailCallToReturnedCode(Runtime::FunctionId function_id, JumpMode jump_mode=JumpMode::kJump)
void SmiCompare(Register dst, Tagged< Smi > src)
void SmiUntagFieldUnsigned(Register dst, Operand src)
void BailoutIfDeoptimized(Register scratch)
void SmiTag(Register dst, Register src)
void Cvtsd2ss(XMMRegister dst, XMMRegister src)
void Push(Register src)
void I16x8SConvertF16x8(YMMRegister dst, XMMRegister src, YMMRegister tmp, Register scratch)
void Cvttss2uiq(Register dst, XMMRegister src, Label *fail=nullptr)
void Call(ExternalReference ext)
void CompareRange(Register value, unsigned lower_limit, unsigned higher_limit)
void StackOverflowCheck(Register num_args, Label *stack_overflow, Label::Distance stack_overflow_distance=Label::kFar)
void CallBuiltin(Builtin builtin)
void Call(Address destination, RelocInfo::Mode rmode)
int PopAll(RegList registers)
void CmpObjectType(Register heap_object, InstanceType type, Register map)
Operand EntryFromBuiltinAsOperand(Builtin builtin_index)
void Move(Register target, Operand source)
void OptimizeCodeOrTailCallOptimizedCodeSlot(Register feedback_vector, Register closure, JumpMode jump_mode)
void LeaveFrame(StackFrame::Type type)
void F64x4Qfma(YMMRegister dst, YMMRegister src1, YMMRegister src2, YMMRegister src3, YMMRegister tmp)
int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion=no_reg)
void JumpIfMarking(Label *is_marking, Label::Distance condition_met_distance=Label::kFar)
void Move(Register dst, Handle< HeapObject > source, RelocInfo::Mode rmode=RelocInfo::FULL_EMBEDDED_OBJECT)
void CompareRoot(Operand with, RootIndex index)
void Move(Register dst, Address ptr, RelocInfo::Mode rmode)
void F32x8Min(YMMRegister dst, YMMRegister lhs, YMMRegister rhs, YMMRegister scratch)
void CheckFeedbackVectorFlagsAndJumpIfNeedsProcessing(Register feedback_vector, CodeKind current_code_kind, Label *flags_need_processing)
void LoadExternalPointerField(Register destination, Operand field_operand, ExternalPointerTagRange tag_range, Register scratch, IsolateRootLocation isolateRootLocation=IsolateRootLocation::kInRootRegister)
void AssertSmi(Operand object) NOOP_UNLESS_DEBUG_CODE
Operand RootAsOperand(RootIndex index)
Operand ExternalReferenceAsOperand(ExternalReference reference, Register scratch=kScratchRegister)
int PushAll(DoubleRegList registers, int stack_slot_size=kStackSavedSavedFPSize)
void LoadCodePointerField(Register destination, Operand field_operand, Register scratch)
void BindExceptionHandler(Label *label)
void DropUnderReturnAddress(int stack_elements, Register scratch=kScratchRegister)
void LoadRoot(Register destination, RootIndex index) final
void Drop(int stack_elements)
Immediate ClearedValue() const
void PushRoot(RootIndex index)
void F64x4Splat(YMMRegister dst, XMMRegister src)
void Switch(Register scrach, Register reg, int case_base_value, Label **labels, int num_labels)
void Popcntl(Register dst, Register src)
void Push(Handle< HeapObject > source)
void LoadRoot(Operand destination, RootIndex index)
void MoveNumber(Register dst, double value)
void Pinsrq(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8, uint32_t *load_pc_offset=nullptr)
void Cvtlsi2sd(XMMRegister dst, Register src)
void CallCodeObject(Register code_object, CodeEntrypointTag tag)
void Ret(int bytes_dropped, Register scratch)
void I64x4ExtMul(YMMRegister dst, XMMRegister src1, XMMRegister src2, YMMRegister scratch, bool is_signed)
void DropArgumentsAndPushNewReceiver(Register argc, Register receiver, Register scratch)
void LoadFromConstantsTable(Register destination, int constant_index) final
void SmiUntag(Register reg)
void PushImm32(int32_t imm32)
void LoadTaggedFieldWithoutDecompressing(Register destination, Operand field_operand)
void CmpInstanceType(Register map, InstanceType type)
void SmiUntag(Register dst, Operand src)
void Popcntq(Register dst, Operand src)
void AssertNotSmi(Register object) NOOP_UNLESS_DEBUG_CODE
int PushAll(RegList registers)
void StoreSandboxedPointerField(Operand dst_field_operand, Register value)
void ComputeCodeStartAddress(Register dst)
void Move(Register dst, Tagged< Smi > source)
void Lzcntl(Register dst, Register src)
void LoadTaggedRoot(Register destination, RootIndex index)
DISALLOW_IMPLICIT_CONSTRUCTORS(MacroAssembler)
void MovePair(Register dst0, Register src0, Register dst1, Register src1)
void JumpJSFunction(Register function_object, JumpMode jump_mode=JumpMode::kJump)
void Tzcntl(Register dst, Operand src)
void JumpIfNotMarking(Label *not_marking, Label::Distance condition_met_distance=Label::kFar)
Condition CheckSmi(Operand src)
void Move(Register dst, ExternalReference ext)
void I32x8ExtMul(YMMRegister dst, XMMRegister src1, XMMRegister src2, YMMRegister scratch, bool is_signed)
void AssertConstructor(Register object) NOOP_UNLESS_DEBUG_CODE
void CheckMarkBit(Register object, Register scratch0, Register scratch1, Condition cc, Label *condition_met, Label::Distance condition_met_distance=Label::kFar)
void PinsrdPreSse41(XMMRegister dst, Register src2, uint8_t imm8, uint32_t *load_pc_offset=nullptr)
void StoreTaggedField(Operand dst_field_operand, Register value)
void JumpIfRoot(Operand with, RootIndex index, Label *if_equal, Label::Distance if_equal_distance=Label::kFar)
void CallRuntime(const Runtime::Function *f, int num_arguments)
void I16x8TruncF16x8U(YMMRegister dst, XMMRegister src, YMMRegister tmp)
void PushQuad(Operand src)
void JumpIfIsInRange(Register value, unsigned lower_limit, unsigned higher_limit, Label *on_in_range, Label::Distance near_jump=Label::kFar)
void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg)
void RecordWrite(Register object, Register slot_address, Register value, SaveFPRegsMode save_fp, SmiCheck smi_check=SmiCheck::kInline, ReadOnlyCheck ro_check=ReadOnlyCheck::kInline, SlotDescriptor slot=SlotDescriptor::ForDirectPointerSlot())
void Move(Register dst, intptr_t x)
void Cvtlui2ss(XMMRegister dst, Register src)
void Cvttss2siq(Register dst, Operand src)
void DecrementCounter(StatsCounter *counter, int value)
void Push(Tagged< Smi > smi)
void Cmp(Register dst, int32_t src)
void I32x8SConvertF32x8(YMMRegister dst, YMMRegister src, YMMRegister tmp, Register scratch)
Operand StackLimitAsOperand(StackLimitKind kind)
void Cvtpd2ph(XMMRegister dst, XMMRegister src, Register tmp)
void JumpIfNotSmi(Operand src, Label *on_not_smi, Label::Distance near_jump=Label::kFar)
void Move(Register target, Immediate source)
void F64x4Qfms(YMMRegister dst, YMMRegister src1, YMMRegister src2, YMMRegister src3, YMMRegister tmp)
void Jump(Handle< Code > code_object, RelocInfo::Mode rmode)
void SmiCompare(Register dst, Operand src)
void AssertFeedbackCell(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void Pinsrq(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8, uint32_t *load_pc_offset=nullptr)
void JumpCodeObject(Register code_object, CodeEntrypointTag tag, JumpMode jump_mode=JumpMode::kJump)
void SmiAddConstant(Operand dst, Tagged< Smi > constant)
void CallForDeoptimization(Builtin target, int deopt_id, Label *exit, DeoptimizeKind kind, Label *ret, Label *jump_deoptimization_entry_label)
Operand ExternalReferenceAsOperand(IsolateFieldId id)
void Move(XMMRegister target, XMMRegister source)
void AssertSignedBitOfSmiIsZero(Register smi) NOOP_UNLESS_DEBUG_CODE
void Cvtqsi2ss(XMMRegister dst, Operand src)
void Jump(Operand op, Condition cc)
void AllocateStackSpace(Register bytes)
void Move(XMMRegister dst, double src)
void Cvtss2sd(XMMRegister dst, Operand src)
void JumpIfLessThan(Register a, int32_t b, Label *dest)
void PushReturnAddressFrom(Register src)
void SmiToInt32(Register dst, Register src)
void Cvtlui2sd(XMMRegister dst, Operand src)
void Pop(Register dst)
void Cvttss2ui(Register dst, XMMRegister src, Label *fail=nullptr)
void AssertJSAny(Register object, Register map_tmp, AbortReason abort_reason) NOOP_UNLESS_DEBUG_CODE
void IsObjectTypeInRange(Register heap_object, InstanceType low, InstanceType high, Register scratch)
void AssertBoundFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void SmiTag(Register reg)
void Cvttsd2siq(Register dst, Operand src)
void F16x8Qfms(YMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, YMMRegister tmp, YMMRegister tmp2)
void Cvtss2sd(XMMRegister dst, XMMRegister src)
void CallBuiltinByIndex(Register builtin_index)
void PrepareCallCFunction(int num_arguments)
void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, Register feedback_vector)
void DecompressTagged(Register destination, Operand field_operand)
void Move(Operand dst, intptr_t x)
void JumpToExternalReference(const ExternalReference &ext, bool builtin_exit_frame=false)
void F32x8Splat(YMMRegister dst, XMMRegister src)
void Move(XMMRegister dst, uint64_t src)
void I64x4Mul(YMMRegister dst, YMMRegister lhs, YMMRegister rhs, YMMRegister tmp1, YMMRegister tmp2)
void CallRecordWriteStub(Register object, Register slot_address, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void CallDebugOnFunctionCall(Register fun, Register new_target, Register expected_parameter_count_or_dispatch_handle, Register actual_parameter_count)
Condition CheckFeedbackVectorFlagsNeedsProcessing(Register feedback_vector, CodeKind current_code_kind)
void TryLoadOptimizedOsrCode(Register scratch_and_result, CodeKind min_opt_level, Register feedback_vector, FeedbackSlot slot, Label *on_result, Label::Distance distance)
void AssertUndefinedOrAllocationSite(Register object) NOOP_UNLESS_DEBUG_CODE
void MemoryChunkHeaderFromObject(Register object, Register header)
void F64x4Min(YMMRegister dst, YMMRegister lhs, YMMRegister rhs, YMMRegister scratch)
void SmiUntagUnsigned(Register dst, Operand src)
void IncsspqIfSupported(Register number_of_words, Register scratch)
void SmiCompare(Register smi1, Register smi2)
void Lzcntq(Register dst, Register src)
void DecompressTagged(Register destination, Tagged_t immediate)
void Cmpeqsd(XMMRegister dst, XMMRegister src)
void F32x8Qfma(YMMRegister dst, YMMRegister src1, YMMRegister src2, YMMRegister src3, YMMRegister tmp)
void MoveNonSmi(Register dst, double value)
void Tzcntq(Register dst, Operand src)
void StubPrologue(StackFrame::Type type)
void StoreTaggedSignedField(Operand dst_field_operand, Tagged< Smi > value)
void TestCodeIsMarkedForDeoptimization(Register code)
void TestCodeIsTurbofanned(Register code)
void DecompressTagged(Register destination, Register source)
Condition CheckSmi(Register src)
void LoadTaggedField(TaggedRegister destination, Operand field_operand)
void LoadTrustedPointerField(Register destination, Operand field_operand, IndirectPointerTag tag, Register scratch)
void StoreRootRelative(int32_t offset, Register value) final
void F16x8Min(YMMRegister dst, XMMRegister lhs, XMMRegister rhs, YMMRegister scratch, YMMRegister scratch2)
void Cmp(Operand dst, Tagged< Smi > src)
void LoadMap(Register destination, Register object)
void LoadFeedbackVector(Register dst, Register closure, Label *fbv_undef, Label::Distance distance)
void JumpIfSmi(Register src, Label *on_smi, Label::Distance near_jump=Label::kFar)
void TailCallRuntime(Runtime::FunctionId fid)
void CallRuntime(Runtime::FunctionId fid, int num_arguments)
void DecompressProtected(Register destination, Operand field_operand)
void LoadNativeContextSlot(Register dst, int index)
void CallRecordWriteStubSaveRegisters(Register object, Register slot_address, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void Tzcntl(Register dst, Register src)
void Cvtqui2ss(XMMRegister dst, Operand src)
int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode, Register exclusion=no_reg) const
void S256Select(YMMRegister dst, YMMRegister mask, YMMRegister src1, YMMRegister src2, YMMRegister scratch)
void PopReturnAddressTo(Register dst)
Register GetSmiConstant(Tagged< Smi > value)
void AssertSmiOrHeapObjectInMainCompressionCage(Register object) NOOP_UNLESS_DEBUG_CODE
void DropArguments(Register count)
void AssertCallableFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void I32x8TruncF32x8U(YMMRegister dst, YMMRegister src, YMMRegister scratch1, YMMRegister scratch2)
DISALLOW_IMPLICIT_CONSTRUCTORS(StackArgumentsAccessor)
Operand GetArgumentOperand(int index) const
#define NOOP_UNLESS_DEBUG_CODE
Definition assembler.h:628
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
DirectHandle< Object > new_target
Definition execution.cc:75
Label label
Isolate * isolate
int32_t offset
TNode< Object > receiver
LiftoffRegister reg
int x
uint32_t const mask
#define DECLARE_ISPLAT(name, suffix, instr_mov)
#define DECLARE_IEXTADDPAIRWISE(ExtAddPairwiseOp)
#define MACRO_ASM_X64_ISPLAT_LIST(V)
#define MACRO_ASM_X64_IEXTADDPAIRWISE_LIST(V)
ReadOnlyCheck
SmiCheck
ComparisonMode
ArgumentAdaptionMode
InvokeType
SetIsolateDataSlots
JumpMode
RegListBase< RegisterT > registers
InstructionOperand destination
int r
Definition mul-fft.cc:298
constexpr Register no_reg
constexpr Register kRootRegister
MemOperand ExitFrameCallerStackSlotOperand(int index)
const int kSmiTagSize
Definition v8-internal.h:87
Operand FieldOperand(Register object, int offset)
Address Tagged_t
Definition globals.h:547
constexpr int kSmiShift
MemOperand FieldMemOperand(Register object, int offset)
constexpr int kSystemPointerSize
Definition globals.h:410
constexpr Register kScratchRegister
const int kHeapObjectTag
Definition v8-internal.h:72
const int kSmiShiftSize
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr Register kPtrComprCageBaseRegister
void CallApiFunctionAndReturn(MacroAssembler *masm, bool with_profiling, Register function_address, ExternalReference thunk_ref, Register thunk_arg, int slots_to_drop_on_return, MemOperand *argc_operand, MemOperand return_value_operand)
constexpr Register kJSFunctionRegister
MemOperand ExitFrameStackSlotOperand(int offset)
#define UNREACHABLE()
Definition logging.h:67
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define V8_EXPORT_PRIVATE
Definition macros.h:460
SmiIndex(Register index_register, ScaleFactor scale)