void AssemblerRISCVV::vrgather_vi(VRegister vd, VRegister vs2, int8_t imm5,
void AssemblerRISCVV::vrgather_vx(VRegister vd, VRegister vs2, Register rs1,
#define DEFINE_OPIVV(name, funct6) \
  void AssemblerRISCVV::name##_vv(VRegister vd, VRegister vs2, VRegister vs1, \
                                  MaskType mask) { \
    GenInstrV(funct6, OP_IVV, vd, vs1, vs2, mask); \
  }

#define DEFINE_OPFVV(name, funct6) \
  void AssemblerRISCVV::name##_vv(VRegister vd, VRegister vs2, VRegister vs1, \
                                  MaskType mask) { \
    GenInstrV(funct6, OP_FVV, vd, vs1, vs2, mask); \
  }

#define DEFINE_OPFWV(name, funct6) \
  void AssemblerRISCVV::name##_wv(VRegister vd, VRegister vs2, VRegister vs1, \
                                  MaskType mask) { \
    GenInstrV(funct6, OP_FVV, vd, vs1, vs2, mask); \
  }

#define DEFINE_OPFRED(name, funct6) \
  void AssemblerRISCVV::name##_vs(VRegister vd, VRegister vs2, VRegister vs1, \
                                  MaskType mask) { \
    GenInstrV(funct6, OP_FVV, vd, vs1, vs2, mask); \
  }

#define DEFINE_OPIVX(name, funct6) \
  void AssemblerRISCVV::name##_vx(VRegister vd, VRegister vs2, Register rs1, \
                                  MaskType mask) { \
    GenInstrV(funct6, OP_IVX, vd, rs1, vs2, mask); \
  }

#define DEFINE_OPIVI(name, funct6) \
  void AssemblerRISCVV::name##_vi(VRegister vd, VRegister vs2, int8_t imm5, \
                                  MaskType mask) { \
    GenInstrV(funct6, vd, imm5, vs2, mask); \
  }

#define DEFINE_OPMVV(name, funct6) \
  void AssemblerRISCVV::name##_vv(VRegister vd, VRegister vs2, VRegister vs1, \
                                  MaskType mask) { \
    GenInstrV(funct6, OP_MVV, vd, vs1, vs2, mask); \
  }

#define DEFINE_OPMVX(name, funct6) \
  void AssemblerRISCVV::name##_vx(VRegister vd, VRegister vs2, Register rs1, \
                                  MaskType mask) { \
    GenInstrV(funct6, OP_MVX, vd, rs1, vs2, mask); \
  }

#define DEFINE_OPFVF(name, funct6) \
  void AssemblerRISCVV::name##_vf(VRegister vd, VRegister vs2, \
                                  FPURegister fs1, MaskType mask) { \
    GenInstrV(funct6, OP_FVF, vd, fs1, vs2, mask); \
  }

#define DEFINE_OPFWF(name, funct6) \
  void AssemblerRISCVV::name##_wf(VRegister vd, VRegister vs2, \
                                  FPURegister fs1, MaskType mask) { \
    GenInstrV(funct6, OP_FVF, vd, fs1, vs2, mask); \
  }

#define DEFINE_OPFVV_FMA(name, funct6) \
  void AssemblerRISCVV::name##_vv(VRegister vd, VRegister vs1, VRegister vs2, \
                                  MaskType mask) { \
    GenInstrV(funct6, OP_FVV, vd, vs1, vs2, mask); \
  }

#define DEFINE_OPFVF_FMA(name, funct6) \
  void AssemblerRISCVV::name##_vf(VRegister vd, FPURegister fs1, \
                                  VRegister vs2, MaskType mask) { \
    GenInstrV(funct6, OP_FVF, vd, fs1, vs2, mask); \
  }

#define DEFINE_OPMVV_VIE(name, vs1) \
  void AssemblerRISCVV::name(VRegister vd, VRegister vs2, MaskType mask) { \
    GenInstrV(VXUNARY0_FUNCT6, OP_MVV, vd, vs1, vs2, mask); \
  }
#undef DEFINE_OPFVV_FMA
#undef DEFINE_OPFVF_FMA
#undef DEFINE_OPMVV_VIE
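// For context: elsewhere in the file these macros are instantiated once per
// instruction. A representative, hedged example (pairing vadd with the
// VADD_FUNCT6 constant that appears later in this listing; the exact
// instantiation set in the file may differ):
//
//   DEFINE_OPIVV(vadd, VADD_FUNCT6)  // -> vadd_vv(vd, vs2, vs1, mask)
//   DEFINE_OPIVX(vadd, VADD_FUNCT6)  // -> vadd_vx(vd, vs2, rs1, mask)
//   DEFINE_OPIVI(vadd, VADD_FUNCT6)  // -> vadd_vi(vd, vs2, imm5, mask)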
  int32_t zimm = GenZimm(vsew, vlmul, tail, mask) & 0x3FF;
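// The "& 0x3FF" keeps the ten zimm bits that vsetvli encodes. Below is a
// minimal sketch of what GenZimm plausibly packs, assuming the standard RVV
// vtype layout; it is not a copy of V8's implementation, which this listing
// does not show.
static int32_t GenZimmSketch(VSew vsew, Vlmul vlmul, TailAgnosticType tail,
                             MaskAgnosticType mask) {
  return (static_cast<int32_t>(mask) << 7) |  // vma (mask agnostic) -> bit 7
         (static_cast<int32_t>(tail) << 6) |  // vta (tail agnostic) -> bit 6
         (static_cast<int32_t>(vsew) << 3) |  // vsew -> bits 5:3
         static_cast<int32_t>(vlmul);         // vlmul -> bits 2:0
}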
  DCHECK(is_uint5(imm5) || is_int5(imm5));
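// The vi-form immediate is a 5-bit field that different instructions treat as
// signed or unsigned, hence either range is accepted here; only the low five
// bits reach the encoding. Sketch (the shift follows the OP-IVI format, imm5
// in bits 19:15; kRvvImm5Mask later in this listing is presumably the
// matching mask):
uint32_t imm_field = (static_cast<uint32_t>(imm5) & 0x1F) << 15;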
                                uint8_t IsMop, bool IsMew, uint8_t Nf) {
                                uint8_t IsMop, bool IsMew, uint8_t Nf) {
                                uint8_t IsMop, bool IsMew, uint8_t Nf) {
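// The fragments above are the trailing parameters of three GenInstrV
// overloads that emit vector loads/stores. A hedged sketch of where those
// fields land in the instruction word (positions per the RVV load/store
// format; the helper name and literal shifts are illustrative, not from
// this file):
uint32_t PackVecMemHiBits(uint8_t Nf, bool IsMew, uint8_t IsMop, bool vm) {
  return (uint32_t{Nf} << 29) |     // nf  -> bits 31:29 (NFIELDS - 1)
         (uint32_t{IsMew} << 28) |  // mew -> bit 28
         (uint32_t{IsMop} << 26) |  // mop -> bits 27:26
         (uint32_t{vm} << 25);      // vm  -> bit 25 (0 = masked)
}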
void AssemblerRISCVV::vlseg3(VRegister vd, Register rs1, uint8_t lumop,
void AssemblerRISCVV::vlseg4(VRegister vd, Register rs1, uint8_t lumop,
void AssemblerRISCVV::vlseg5(VRegister vd, Register rs1, uint8_t lumop,
void AssemblerRISCVV::vlseg6(VRegister vd, Register rs1, uint8_t lumop,
void AssemblerRISCVV::vlseg7(VRegister vd, Register rs1, uint8_t lumop,
void AssemblerRISCVV::vlseg8(VRegister vd, Register rs1, uint8_t lumop,
void AssemblerRISCVV::vsseg2(VRegister vd, Register rs1, uint8_t sumop,
void AssemblerRISCVV::vsseg3(VRegister vd, Register rs1, uint8_t sumop,
void AssemblerRISCVV::vsseg4(VRegister vd, Register rs1, uint8_t sumop,
void AssemblerRISCVV::vsseg5(VRegister vd, Register rs1, uint8_t sumop,
void AssemblerRISCVV::vsseg6(VRegister vd, Register rs1, uint8_t sumop,
void AssemblerRISCVV::vsseg7(VRegister vd, Register rs1, uint8_t sumop,
void AssemblerRISCVV::vsseg8(VRegister vd, Register rs1, uint8_t sumop,
void AssemblerRISCVV::vlsseg2(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vlsseg3(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vlsseg4(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vlsseg5(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vlsseg6(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vlsseg7(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vlsseg8(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vssseg2(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vssseg3(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vssseg4(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vssseg5(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vssseg6(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vssseg7(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vssseg8(VRegister vd, Register rs1, Register rs2,
void AssemblerRISCVV::vlxseg2(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vlxseg3(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vlxseg4(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vlxseg5(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vlxseg6(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vlxseg7(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vlxseg8(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vsxseg2(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vsxseg3(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vsxseg4(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vsxseg5(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vsxseg6(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vsxseg7(VRegister vd, Register rs1, VRegister rs2,
void AssemblerRISCVV::vsxseg8(VRegister vd, Register rs1, VRegister rs2,
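// For orientation (not from the source, but the standard RVV segment
// load/store terminology these names follow): vlseg{2..8}/vsseg{2..8} are
// unit-stride segment accesses, vlsseg/vssseg take a byte stride in rs2, and
// vlxseg/vsxseg take an index vector in rs2. The digit is NFIELDS; the
// emitted Nf field is NFIELDS - 1.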
#ifdef CAN_USE_RVV_INSTRUCTIONS
void vsu(VRegister vd, Register rs1, VRegister vs3, VSew vsew, MaskType mask=NoMask)
void vmadc_vv(VRegister vd, VRegister vs1, VRegister vs2)
void vmerge_vx(VRegister vd, Register rs1, VRegister vs2)
void vls(VRegister vd, Register rs1, Register rs2, VSew vsew, MaskType mask=NoMask)
void vredmin_vs(VRegister vd, VRegister vs2, VRegister vs1, MaskType mask=NoMask)
void vmerge_vv(VRegister vd, VRegister vs1, VRegister vs2)
void vmv_vi(VRegister vd, uint8_t simm5)
static int32_t GenZimm(VSew vsew, Vlmul vlmul, TailAgnosticType tail=tu, MaskAgnosticType mask=mu)
void vadc_vv(VRegister vd, VRegister vs1, VRegister vs2)
void vredmax_vs(VRegister vd, VRegister vs2, VRegister vs1, MaskType mask=NoMask)
void vmv_xs(Register rd, VRegister vs2)
void vfmerge_vf(VRegister vd, FPURegister fs1, VRegister vs2)
void vredminu_vs(VRegister vd, VRegister vs2, VRegister vs1, MaskType mask=NoMask)
void vmadc_vx(VRegister vd, Register rs1, VRegister vs2)
void vredmaxu_vs(VRegister vd, VRegister vs2, VRegister vs1, MaskType mask=NoMask)
void vfmv_vf(VRegister vd, FPURegister fs1)
void vlx(VRegister vd, Register rs1, VRegister vs3, VSew vsew, MaskType mask=NoMask)
void vadc_vx(VRegister vd, Register rs1, VRegister vs2)
void vmerge_vi(VRegister vd, uint8_t imm5, VRegister vs2)
void vsetvli(Register rd, Register rs1, VSew vsew, Vlmul vlmul, TailAgnosticType tail=tu, MaskAgnosticType mask=mu)
void vsetivli(Register rd, uint8_t uimm, VSew vsew, Vlmul vlmul, TailAgnosticType tail=tu, MaskAgnosticType mask=mu)
void vsx(VRegister vd, Register rs1, VRegister vs3, VSew vsew, MaskType mask=NoMask)
void vfirst_m(Register rd, VRegister vs2, MaskType mask=NoMask)
void vfmv_fs(FPURegister fd, VRegister vs2)
void vmadc_vi(VRegister vd, uint8_t imm5, VRegister vs2)
void vsetvl(VSew vsew, Vlmul vlmul, TailAgnosticType tail=tu, MaskAgnosticType mask=mu)
void GenInstrV(Register rd, Register rs1, Register rs2)
void vwaddu_wx(VRegister vd, VRegister vs2, Register rs1, MaskType mask=NoMask)
void vl(VRegister vd, Register rs1, uint8_t lumop, VSew vsew, MaskType mask=NoMask)
void vcpop_m(Register rd, VRegister vs2, MaskType mask=NoMask)
void vfmv_sf(VRegister vd, FPURegister fs)
void vmv_sx(VRegister vd, Register rs1)
void vmv_vx(VRegister vd, Register rs1)
void vss(VRegister vd, Register rs1, Register rs2, VSew vsew, MaskType mask=NoMask)
void vs(VRegister vd, Register rs1, uint8_t sumop, VSew vsew, MaskType mask=NoMask)
void vid_v(VRegister vd, MaskType mask=Mask)
void vadc_vi(VRegister vd, uint8_t imm5, VRegister vs2)
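// A minimal usage sketch of the interface listed above: reduce a buffer of
// uint32_t values to their unsigned maximum. The surrounding `assm` object,
// the register/vector names (a0-a2, t0, v1, v2, zero_reg) and the E32/m1
// SEW/LMUL constants are assumptions for illustration; only the member
// signatures come from this listing.
void MaxReduceSketch(AssemblerRISCVV& assm) {
  assm.vsetvli(t0, a0, E32, m1);  // vl = min(AVL in a0, VLMAX), SEW=32, LMUL=1
  assm.vmv_vx(v2, zero_reg);      // seed the scalar accumulator with 0
  assm.vl(v1, a1, 0, E32);        // unit-stride load from the address in a1
  assm.vredmaxu_vs(v2, v1, v2);   // v2[0] = unsigned max(v2[0], elements of v1)
  assm.vmv_xs(a2, v2);            // move the reduction result to a2
}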
constexpr Opcode VMAXU_FUNCT6
const uint32_t kRvvMewMask
constexpr Opcode VFMACC_FUNCT6
constexpr Opcode VMUL_FUNCT6
constexpr Opcode VMSGTU_FUNCT6
constexpr Opcode VMULHU_FUNCT6
constexpr Opcode VWADDU_FUNCT6
constexpr Opcode VSMUL_FUNCT6
constexpr Opcode VFMADD_FUNCT6
constexpr Opcode VFDIV_FUNCT6
const uint32_t kRvvNfMask
constexpr Opcode VFWMACC_FUNCT6
constexpr Opcode VREDMAXU_FUNCT6
constexpr Opcode VMADC_FUNCT6
constexpr Opcode VMSLE_FUNCT6
constexpr Opcode VWADD_FUNCT6
constexpr Opcode VFSGNJ_FUNCT6
constexpr Opcode VRSUB_FUNCT6
constexpr Opcode VFMSUB_FUNCT6
constexpr Opcode VFSUB_FUNCT6
constexpr Opcode VMULH_FUNCT6
const uint32_t kRvvMopMask
constexpr Opcode VFWSUB_W_FUNCT6
constexpr Opcode VWMULU_FUNCT6
constexpr Opcode VFWNMACC_FUNCT6
constexpr Opcode VSLL_FUNCT6
constexpr Opcode VSSUB_FUNCT6
constexpr Opcode VMV_FUNCT6
constexpr Opcode VXOR_FUNCT6
constexpr Opcode VWMUL_FUNCT6
constexpr Opcode VDIV_FUNCT6
constexpr Opcode VFREDMAX_FUNCT6
constexpr Opcode VMSLT_FUNCT6
const uint32_t kRvvZimmMask
constexpr Opcode VMSNE_FUNCT6
constexpr Opcode VFNMADD_FUNCT6
constexpr Opcode VSADDU_FUNCT6
constexpr Opcode VMAX_FUNCT6
constexpr Opcode VOR_FUNCT6
constexpr Opcode VAND_FUNCT6
constexpr Opcode VMSLTU_FUNCT6
const uint32_t kRvvWidthMask
constexpr Opcode VFWNMSAC_FUNCT6
constexpr Opcode VMFEQ_FUNCT6
constexpr Opcode VREDMAX_FUNCT6
constexpr Opcode VFMSAC_FUNCT6
constexpr Opcode VFADD_FUNCT6
constexpr Opcode VCOMPRESS_FUNCT6
constexpr Opcode VADC_FUNCT6
constexpr Opcode VREDMINU_FUNCT6
constexpr Opcode VSLIDEDOWN_FUNCT6
constexpr Opcode VFSGNJX_FUNCT6
constexpr Opcode VMINU_FUNCT6
constexpr Opcode VRXUNARY0_FUNCT6
uint8_t vsew_switch(VSew vsew)
constexpr Opcode VFMAX_FUNCT6
const uint32_t kRvvVmMask
constexpr Opcode VSRL_FUNCT6
constexpr Opcode VWXUNARY0_FUNCT6
const uint32_t kRvvImm5Mask
const int kRvvFunct6Shift
constexpr Opcode VWFUNARY0_FUNCT6
const uint32_t kRvvRs1Mask
constexpr Opcode VMFLT_FUNCT6
constexpr Opcode VFWREDOSUM_FUNCT6
constexpr Opcode VFMIN_FUNCT6
constexpr Opcode VFNMACC_FUNCT6
constexpr Opcode VFMUL_FUNCT6
constexpr Opcode VMUNARY0_FUNCT6
constexpr Opcode VDIVU_FUNCT6
const uint32_t kRvvRs2Mask
constexpr Opcode VNCLIPU_FUNCT6
constexpr Opcode VMIN_FUNCT6
constexpr Opcode VMSLEU_FUNCT6
constexpr Opcode VSLIDEUP_FUNCT6
constexpr Opcode VFWSUB_FUNCT6
constexpr Opcode VMSGT_FUNCT6
constexpr Opcode VSSUBU_FUNCT6
constexpr Opcode VADD_FUNCT6
constexpr Opcode VFNMSUB_FUNCT6
constexpr Opcode VFSGNJN_FUNCT6
constexpr Opcode VMFLE_FUNCT6
constexpr Opcode VMULHSU_FUNCT6
constexpr Opcode VSUB_FUNCT6
constexpr Opcode VFNMSAC_FUNCT6
constexpr Opcode VMFNE_FUNCT6
constexpr Opcode VNCLIP_FUNCT6
constexpr Opcode VMSEQ_FUNCT6
constexpr Opcode VFWADD_FUNCT6
constexpr Opcode VFWMUL_FUNCT6
constexpr Opcode VREDMIN_FUNCT6
constexpr Opcode VFWADD_W_FUNCT6
constexpr Opcode VFWMSAC_FUNCT6
const uint32_t kRvvVdMask
constexpr Opcode VRFUNARY0_FUNCT6
constexpr Opcode VWADDUW_FUNCT6
constexpr Opcode VSRA_FUNCT6
constexpr Opcode VFWREDUSUM_FUNCT6
constexpr Opcode VRGATHER_FUNCT6
constexpr Opcode VSADD_FUNCT6
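// To connect the constant list above with the DEFINE_* macros earlier: each
// *_FUNCT6 value carries an instruction's funct6 pattern, positioned via
// kRvvFunct6Shift. A sketch with hypothetical names (the funct6 value for
// vsub comes from the RVV encoding; the exact form of V8's definitions is
// not shown here):
constexpr int kFunct6ShiftSketch = 26;  // funct6 occupies bits 31:26
constexpr uint32_t kVsubFunct6Bits = 0b000010u << kFunct6ShiftSketch;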