#ifndef V8_CODEGEN_ARM64_INSTRUCTIONS_ARM64_H_
#define V8_CODEGEN_ARM64_INSTRUCTIONS_ARM64_H_

struct AssemblerOptions;

using Instr = uint32_t;
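// Every A64 instruction is a fixed-width 32-bit word, so a raw encoding is
// carried around as a plain uint32_t.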
  uint32_t Bits(int msb, int lsb) const {
    return unsigned_bitextract_32(msb, lsb, InstructionBits());
  }
#define DEFINE_GETTER(Name, HighBit, LowBit, Func) \
  int32_t Name() const { return Func(HighBit, LowBit); }
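// Illustration (not part of the header): each INSTRUCTION_FIELDS_LIST entry is
// expanded through DEFINE_GETTER into a tiny bit-field accessor. An entry such
// as V_(Rd, 4, 0, Bits) would expand to
//
//   int32_t Rd() const { return Bits(4, 0); }
//
// i.e. the destination register is read straight out of bits 4..0.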
  int ImmPCRel() const {
    int offset = (static_cast<uint32_t>(ImmPCRelHi()) << ImmPCRelLo_width) |
                 ImmPCRelLo();
    int width = ImmPCRelLo_width + ImmPCRelHi_width;
    return signed_bitextract_32(width - 1, 0, offset);
  }
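  // ADR/ADRP split their 21-bit PC-relative immediate into a 2-bit low part
  // (ImmPCRelLo, bits 30:29) and a 19-bit high part (ImmPCRelHi, bits 23:5);
  // the getter above reassembles and sign-extends the pair.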
  int NEONLSIndex(int access_size_shift) const {
    int q = NEONQ();
    int s = NEONS();
    int size = NEONLSSize();
    int index = (q << 3) | (s << 2) | size;
    return index >> access_size_shift;
  }
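  // Illustrative example: for a single-structure load of 16-bit lanes the
  // lane number is Q:S:size<1>, so access_size_shift is 1 and the final shift
  // drops the low bit of the size field.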
  static float Imm8ToFP32(uint32_t imm8) {
    //   Imm8: abcdefgh (8 bits)
    // Single: aBbb.bbbc.defg.h000.0000.0000.0000.0000 (32 bits), where B = b ^ 1.
    uint32_t bits = imm8;
    uint32_t bit7 = (bits >> 7) & 0x1;
    uint32_t bit6 = (bits >> 6) & 0x1;
    uint32_t bit5_to_0 = bits & 0x3f;
    uint32_t result = (bit7 << 31) | ((32 - bit6) << 25) | (bit5_to_0 << 19);
    return bit_cast<float>(result);
  }
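  // Illustrative check: Imm8ToFP32(0x70) produces result == 0x3F800000, i.e.
  // the FMOV immediate 0x70 decodes to 1.0f.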
  static double Imm8ToFP64(uint32_t imm8) {
    //   Imm8: abcdefgh (8 bits)
    // Double: aBbb.bbbb.bbcd.efgh.0000... (64 bits), where B = b ^ 1.
    uint32_t bits = imm8;
    uint64_t bit7 = (bits >> 7) & 0x1;
    uint64_t bit6 = (bits >> 6) & 0x1;
    uint64_t bit5_to_0 = bits & 0x3f;
    uint64_t result = (bit7 << 63) | ((256 - bit6) << 54) | (bit5_to_0 << 48);
    return bit_cast<double>(result);
  }
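  // Illustrative check: Imm8ToFP64(0x70) produces result ==
  // 0x3FF0000000000000, i.e. the same immediate decodes to 1.0 as a double.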
  static constexpr int ImmBranchRangeBitwidth(ImmBranchType branch_type) {
    switch (branch_type) {
      case UncondBranchType:   return ImmUncondBranch_width;
      case CondBranchType:     return ImmCondBranch_width;
      case CompareBranchType:  return ImmCmpBranch_width;
      case TestBranchType:     return ImmTestBranch_width;
    }
  }
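  // The widths above are the raw immediate field sizes: 26 bits for B/BL,
  // 19 bits for B.cond and CBZ/CBNZ, and 14 bits for TBZ/TBNZ. Each immediate
  // counts 4-byte instructions, so the reachable ranges are roughly
  // +/-128 MB, +/-1 MB and +/-32 KB respectively.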
  int32_t ImmBranch() const {
    switch (BranchType()) {
      case CondBranchType:     return ImmCondBranch();
      case UncondBranchType:   return ImmUncondBranch();
      case CompareBranchType:  return ImmCmpBranch();
      case TestBranchType:     return ImmTestBranch();
    }
  }
  int ImmUnresolvedInternalReference() const {
    // Unresolved references are encoded as two consecutive brk instructions;
    // the 32-bit payload is split across their 16-bit immediate fields.
    int32_t high16 = ImmException();
    int32_t low16 = following()->ImmException();
    return (high16 << 16) | low16;
  }
    int imm_hint = ImmHint();
  uintptr_t LiteralAddress() {
    int offset = ImmLLiteral() * kLoadLiteralScale;
    return reinterpret_cast<uintptr_t>(this) + offset;
  }
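  // ImmLLiteral is a word offset, so scaling by kLoadLiteralScale (the 4-byte
  // instruction size) turns it into a byte offset from the LDR-literal
  // instruction itself.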
  template <typename T>
  V8_INLINE static Instruction* Cast(T src) {
    return reinterpret_cast<Instruction*>(src);
  }
  template <ImmBranchType branch_type>
  void SetBranchImmTarget(Instruction* target,
                          WritableJitAllocation* jit_allocation = nullptr) {
    DCHECK(IsAligned(DistanceTo(target), kInstrSize));
    int offset = static_cast<int>(DistanceTo(target) >> kInstrSizeLog2);
    Instr branch_imm = 0;
    uint32_t imm_mask = 0;
    switch (branch_type) {
      case CondBranchType:
      case CompareBranchType:
        static_assert(ImmCondBranch_mask == ImmCmpBranch_mask);
        static_assert(ImmCondBranch_offset == ImmCmpBranch_offset);
        branch_imm = checked_truncate_to_int19(offset) << ImmCondBranch_offset;
        imm_mask = ImmCondBranch_mask;
        break;
      case UncondBranchType:
        branch_imm = checked_truncate_to_int26(offset)
                     << ImmUncondBranch_offset;
        imm_mask = ImmUncondBranch_mask;
        break;
      case TestBranchType:
        branch_imm = checked_truncate_to_int14(offset) << ImmTestBranch_offset;
        imm_mask = ImmTestBranch_mask;
        break;
    }
    SetInstructionBits(Mask(~imm_mask) | branch_imm, jit_allocation);
  }
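  // Usage sketch (illustrative, not from the header): code that has resolved
  // the destination of an unconditional branch could rebind it in place with
  //
  //   branch->SetBranchImmTarget<UncondBranchType>(new_target);
  //
  // where `branch` and `new_target` are Instruction pointers; the template
  // parameter picks which immediate field layout gets patched.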
    {19, 18, 17, 16, 30},
    {NF_UNDEF, NF_B, NF_H, NF_B, NF_S, NF_B, NF_H, NF_B,
     NF_D,     NF_B, NF_H, NF_B, NF_S, NF_B, NF_H, NF_B}
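    // Illustrative note: these format-map initializers pair a list of
    // instruction bit positions with a lookup table of formats. The listed
    // bits (19..16, i.e. the imm5-style size field, optionally combined with
    // the Q bit 30) are read as an index into the table: an odd imm5 selects
    // a byte element, xx10 a halfword, x100 a word and 1000 a doubleword.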