SYSREG ((n), (e), (f) | F_ARCHEXT, \
AARCH64_FEATURE_##fe1 | AARCH64_FEATURE_##fe2)
-#define SR_V8_1_A(n,e,f) SR_FEAT2(n,e,f,V8_A,V8_1)
-#define SR_V8_4_A(n,e,f) SR_FEAT2(n,e,f,V8_A,V8_4)
-
-#define SR_V8_A(n,e,f) SR_FEAT (n,e,f,V8_A)
-#define SR_V8_R(n,e,f) SR_FEAT (n,e,f,V8_R)
-#define SR_V8_1(n,e,f) SR_FEAT (n,e,f,V8_1)
-#define SR_V8_2(n,e,f) SR_FEAT (n,e,f,V8_2)
-#define SR_V8_3(n,e,f) SR_FEAT (n,e,f,V8_3)
-#define SR_V8_4(n,e,f) SR_FEAT (n,e,f,V8_4)
-#define SR_V8_6(n,e,f) SR_FEAT (n,e,f,V8_6)
-#define SR_V8_7(n,e,f) SR_FEAT (n,e,f,V8_7)
-#define SR_V8_8(n,e,f) SR_FEAT (n,e,f,V8_8)
+#define SR_V8_1_A(n,e,f) SR_FEAT2(n,e,f,V8A,V8_1A)
+#define SR_V8_4_A(n,e,f) SR_FEAT2(n,e,f,V8A,V8_4A)
+
+#define SR_V8A(n,e,f) SR_FEAT (n,e,f,V8A)
+#define SR_V8R(n,e,f) SR_FEAT (n,e,f,V8R)
+#define SR_V8_1A(n,e,f) SR_FEAT (n,e,f,V8_1A)
+#define SR_V8_2A(n,e,f) SR_FEAT (n,e,f,V8_2A)
+#define SR_V8_3A(n,e,f) SR_FEAT (n,e,f,V8_3A)
+#define SR_V8_4A(n,e,f) SR_FEAT (n,e,f,V8_4A)
+#define SR_V8_6A(n,e,f) SR_FEAT (n,e,f,V8_6A)
+#define SR_V8_7A(n,e,f) SR_FEAT (n,e,f,V8_7A)
+#define SR_V8_8A(n,e,f) SR_FEAT (n,e,f,V8_8A)
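/* For example, SR_V8_1_A ("ttbr1_el2", e, f) expands via SR_FEAT2 above to
   SYSREG (("ttbr1_el2"), (e), (f) | F_ARCHEXT,
	   AARCH64_FEATURE_V8A | AARCH64_FEATURE_V8_1A).  */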
/* Has no separate libopcodes feature flag, but separated out for clarity. */
#define SR_GIC(n,e,f) SR_CORE (n,e,f)
/* Has no separate libopcodes feature flag, but separated out for clarity. */
-#define SR_AMU(n,e,f) SR_FEAT (n,e,f,V8_4)
+#define SR_AMU(n,e,f) SR_FEAT (n,e,f,V8_4A)
#define SR_LOR(n,e,f) SR_FEAT (n,e,f,LOR)
#define SR_PAN(n,e,f) SR_FEAT (n,e,f,PAN)
#define SR_RAS(n,e,f) SR_FEAT (n,e,f,RAS)
const aarch64_sys_reg aarch64_sys_regs [] =
{
SR_CORE ("spsr_el1", CPEN_ (0,C0,0), 0), /* = spsr_svc. */
- SR_V8_1 ("spsr_el12", CPEN_ (5,C0,0), 0),
+ SR_V8_1A ("spsr_el12", CPEN_ (5,C0,0), 0),
SR_CORE ("elr_el1", CPEN_ (0,C0,1), 0),
- SR_V8_1 ("elr_el12", CPEN_ (5,C0,1), 0),
+ SR_V8_1A ("elr_el12", CPEN_ (5,C0,1), 0),
SR_CORE ("sp_el0", CPEN_ (0,C1,0), 0),
SR_CORE ("spsel", CPEN_ (0,C2,0), 0),
SR_CORE ("daif", CPEN_ (3,C2,1), 0),
SR_CORE ("currentel", CPEN_ (0,C2,2), F_REG_READ),
SR_PAN ("pan", CPEN_ (0,C2,3), 0),
- SR_V8_2 ("uao", CPEN_ (0,C2,4), 0),
+ SR_V8_2A ("uao", CPEN_ (0,C2,4), 0),
SR_CORE ("nzcv", CPEN_ (3,C2,0), 0),
SR_SSBS ("ssbs", CPEN_ (3,C2,6), 0),
SR_CORE ("fpcr", CPEN_ (3,C4,0), 0),
SR_CORE ("mvfr1_el1", CPENC (3,0,C0,C3,1), F_REG_READ),
SR_CORE ("mvfr2_el1", CPENC (3,0,C0,C3,2), F_REG_READ),
SR_CORE ("ccsidr_el1", CPENC (3,1,C0,C0,0), F_REG_READ),
- SR_V8_3 ("ccsidr2_el1", CPENC (3,1,C0,C0,2), F_REG_READ),
+ SR_V8_3A ("ccsidr2_el1", CPENC (3,1,C0,C0,2), F_REG_READ),
SR_CORE ("id_aa64pfr0_el1", CPENC (3,0,C0,C4,0), F_REG_READ),
SR_CORE ("id_aa64pfr1_el1", CPENC (3,0,C0,C4,1), F_REG_READ),
SR_CORE ("id_aa64dfr0_el1", CPENC (3,0,C0,C5,0), F_REG_READ),
SR_CORE ("sctlr_el1", CPENC (3,0,C1,C0,0), 0),
SR_CORE ("sctlr_el2", CPENC (3,4,C1,C0,0), 0),
SR_CORE ("sctlr_el3", CPENC (3,6,C1,C0,0), 0),
- SR_V8_1 ("sctlr_el12", CPENC (3,5,C1,C0,0), 0),
+ SR_V8_1A ("sctlr_el12", CPENC (3,5,C1,C0,0), 0),
SR_CORE ("actlr_el1", CPENC (3,0,C1,C0,1), 0),
SR_CORE ("actlr_el2", CPENC (3,4,C1,C0,1), 0),
SR_CORE ("actlr_el3", CPENC (3,6,C1,C0,1), 0),
SR_CORE ("cpacr_el1", CPENC (3,0,C1,C0,2), 0),
- SR_V8_1 ("cpacr_el12", CPENC (3,5,C1,C0,2), 0),
+ SR_V8_1A ("cpacr_el12", CPENC (3,5,C1,C0,2), 0),
SR_CORE ("cptr_el2", CPENC (3,4,C1,C1,2), 0),
SR_CORE ("cptr_el3", CPENC (3,6,C1,C1,2), 0),
SR_CORE ("scr_el3", CPENC (3,6,C1,C1,0), 0),
SR_SVE ("zcr_el3", CPENC (3,6,C1,C2,0), 0),
SR_CORE ("ttbr0_el1", CPENC (3,0,C2,C0,0), 0),
SR_CORE ("ttbr1_el1", CPENC (3,0,C2,C0,1), 0),
- SR_V8_A ("ttbr0_el2", CPENC (3,4,C2,C0,0), 0),
+ SR_V8A ("ttbr0_el2", CPENC (3,4,C2,C0,0), 0),
SR_V8_1_A ("ttbr1_el2", CPENC (3,4,C2,C0,1), 0),
SR_CORE ("ttbr0_el3", CPENC (3,6,C2,C0,0), 0),
- SR_V8_1 ("ttbr0_el12", CPENC (3,5,C2,C0,0), 0),
- SR_V8_1 ("ttbr1_el12", CPENC (3,5,C2,C0,1), 0),
- SR_V8_A ("vttbr_el2", CPENC (3,4,C2,C1,0), 0),
+ SR_V8_1A ("ttbr0_el12", CPENC (3,5,C2,C0,0), 0),
+ SR_V8_1A ("ttbr1_el12", CPENC (3,5,C2,C0,1), 0),
+ SR_V8A ("vttbr_el2", CPENC (3,4,C2,C1,0), 0),
SR_CORE ("tcr_el1", CPENC (3,0,C2,C0,2), 0),
SR_CORE ("tcr_el2", CPENC (3,4,C2,C0,2), 0),
SR_CORE ("tcr_el3", CPENC (3,6,C2,C0,2), 0),
- SR_V8_1 ("tcr_el12", CPENC (3,5,C2,C0,2), 0),
+ SR_V8_1A ("tcr_el12", CPENC (3,5,C2,C0,2), 0),
SR_CORE ("vtcr_el2", CPENC (3,4,C2,C1,2), 0),
- SR_V8_3 ("apiakeylo_el1", CPENC (3,0,C2,C1,0), 0),
- SR_V8_3 ("apiakeyhi_el1", CPENC (3,0,C2,C1,1), 0),
- SR_V8_3 ("apibkeylo_el1", CPENC (3,0,C2,C1,2), 0),
- SR_V8_3 ("apibkeyhi_el1", CPENC (3,0,C2,C1,3), 0),
- SR_V8_3 ("apdakeylo_el1", CPENC (3,0,C2,C2,0), 0),
- SR_V8_3 ("apdakeyhi_el1", CPENC (3,0,C2,C2,1), 0),
- SR_V8_3 ("apdbkeylo_el1", CPENC (3,0,C2,C2,2), 0),
- SR_V8_3 ("apdbkeyhi_el1", CPENC (3,0,C2,C2,3), 0),
- SR_V8_3 ("apgakeylo_el1", CPENC (3,0,C2,C3,0), 0),
- SR_V8_3 ("apgakeyhi_el1", CPENC (3,0,C2,C3,1), 0),
+ SR_V8_3A ("apiakeylo_el1", CPENC (3,0,C2,C1,0), 0),
+ SR_V8_3A ("apiakeyhi_el1", CPENC (3,0,C2,C1,1), 0),
+ SR_V8_3A ("apibkeylo_el1", CPENC (3,0,C2,C1,2), 0),
+ SR_V8_3A ("apibkeyhi_el1", CPENC (3,0,C2,C1,3), 0),
+ SR_V8_3A ("apdakeylo_el1", CPENC (3,0,C2,C2,0), 0),
+ SR_V8_3A ("apdakeyhi_el1", CPENC (3,0,C2,C2,1), 0),
+ SR_V8_3A ("apdbkeylo_el1", CPENC (3,0,C2,C2,2), 0),
+ SR_V8_3A ("apdbkeyhi_el1", CPENC (3,0,C2,C2,3), 0),
+ SR_V8_3A ("apgakeylo_el1", CPENC (3,0,C2,C3,0), 0),
+ SR_V8_3A ("apgakeyhi_el1", CPENC (3,0,C2,C3,1), 0),
SR_CORE ("afsr0_el1", CPENC (3,0,C5,C1,0), 0),
SR_CORE ("afsr1_el1", CPENC (3,0,C5,C1,1), 0),
SR_CORE ("afsr0_el2", CPENC (3,4,C5,C1,0), 0),
SR_CORE ("afsr1_el2", CPENC (3,4,C5,C1,1), 0),
SR_CORE ("afsr0_el3", CPENC (3,6,C5,C1,0), 0),
- SR_V8_1 ("afsr0_el12", CPENC (3,5,C5,C1,0), 0),
+ SR_V8_1A ("afsr0_el12", CPENC (3,5,C5,C1,0), 0),
SR_CORE ("afsr1_el3", CPENC (3,6,C5,C1,1), 0),
- SR_V8_1 ("afsr1_el12", CPENC (3,5,C5,C1,1), 0),
+ SR_V8_1A ("afsr1_el12", CPENC (3,5,C5,C1,1), 0),
SR_CORE ("esr_el1", CPENC (3,0,C5,C2,0), 0),
SR_CORE ("esr_el2", CPENC (3,4,C5,C2,0), 0),
SR_CORE ("esr_el3", CPENC (3,6,C5,C2,0), 0),
- SR_V8_1 ("esr_el12", CPENC (3,5,C5,C2,0), 0),
+ SR_V8_1A ("esr_el12", CPENC (3,5,C5,C2,0), 0),
SR_RAS ("vsesr_el2", CPENC (3,4,C5,C2,3), 0),
SR_CORE ("fpexc32_el2", CPENC (3,4,C5,C3,0), 0),
SR_RAS ("erridr_el1", CPENC (3,0,C5,C3,0), F_REG_READ),
SR_CORE ("far_el1", CPENC (3,0,C6,C0,0), 0),
SR_CORE ("far_el2", CPENC (3,4,C6,C0,0), 0),
SR_CORE ("far_el3", CPENC (3,6,C6,C0,0), 0),
- SR_V8_1 ("far_el12", CPENC (3,5,C6,C0,0), 0),
+ SR_V8_1A ("far_el12", CPENC (3,5,C6,C0,0), 0),
SR_CORE ("hpfar_el2", CPENC (3,4,C6,C0,4), 0),
SR_CORE ("par_el1", CPENC (3,0,C7,C4,0), 0),
SR_CORE ("mair_el1", CPENC (3,0,C10,C2,0), 0),
SR_CORE ("mair_el2", CPENC (3,4,C10,C2,0), 0),
SR_CORE ("mair_el3", CPENC (3,6,C10,C2,0), 0),
- SR_V8_1 ("mair_el12", CPENC (3,5,C10,C2,0), 0),
+ SR_V8_1A ("mair_el12", CPENC (3,5,C10,C2,0), 0),
SR_CORE ("amair_el1", CPENC (3,0,C10,C3,0), 0),
SR_CORE ("amair_el2", CPENC (3,4,C10,C3,0), 0),
SR_CORE ("amair_el3", CPENC (3,6,C10,C3,0), 0),
- SR_V8_1 ("amair_el12", CPENC (3,5,C10,C3,0), 0),
+ SR_V8_1A ("amair_el12", CPENC (3,5,C10,C3,0), 0),
SR_CORE ("vbar_el1", CPENC (3,0,C12,C0,0), 0),
SR_CORE ("vbar_el2", CPENC (3,4,C12,C0,0), 0),
SR_CORE ("vbar_el3", CPENC (3,6,C12,C0,0), 0),
- SR_V8_1 ("vbar_el12", CPENC (3,5,C12,C0,0), 0),
+ SR_V8_1A ("vbar_el12", CPENC (3,5,C12,C0,0), 0),
SR_CORE ("rvbar_el1", CPENC (3,0,C12,C0,1), F_REG_READ),
SR_CORE ("rvbar_el2", CPENC (3,4,C12,C0,1), F_REG_READ),
SR_CORE ("rvbar_el3", CPENC (3,6,C12,C0,1), F_REG_READ),
SR_RAS ("disr_el1", CPENC (3,0,C12,C1,1), 0),
SR_RAS ("vdisr_el2", CPENC (3,4,C12,C1,1), 0),
SR_CORE ("contextidr_el1", CPENC (3,0,C13,C0,1), 0),
- SR_V8_1 ("contextidr_el2", CPENC (3,4,C13,C0,1), 0),
- SR_V8_1 ("contextidr_el12", CPENC (3,5,C13,C0,1), 0),
+ SR_V8_1A ("contextidr_el2", CPENC (3,4,C13,C0,1), 0),
+ SR_V8_1A ("contextidr_el12", CPENC (3,5,C13,C0,1), 0),
SR_RNG ("rndr", CPENC (3,3,C2,C4,0), F_REG_READ),
SR_RNG ("rndrrs", CPENC (3,3,C2,C4,1), F_REG_READ),
SR_MEMTAG ("tco", CPENC (3,3,C4,C2,7), 0),
SR_CORE ("cntvct_el0", CPENC (3,3,C14,C0,2), F_REG_READ),
SR_CORE ("cntvoff_el2", CPENC (3,4,C14,C0,3), 0),
SR_CORE ("cntkctl_el1", CPENC (3,0,C14,C1,0), 0),
- SR_V8_1 ("cntkctl_el12", CPENC (3,5,C14,C1,0), 0),
+ SR_V8_1A ("cntkctl_el12", CPENC (3,5,C14,C1,0), 0),
SR_CORE ("cnthctl_el2", CPENC (3,4,C14,C1,0), 0),
SR_CORE ("cntp_tval_el0", CPENC (3,3,C14,C2,0), 0),
- SR_V8_1 ("cntp_tval_el02", CPENC (3,5,C14,C2,0), 0),
+ SR_V8_1A ("cntp_tval_el02", CPENC (3,5,C14,C2,0), 0),
SR_CORE ("cntp_ctl_el0", CPENC (3,3,C14,C2,1), 0),
- SR_V8_1 ("cntp_ctl_el02", CPENC (3,5,C14,C2,1), 0),
+ SR_V8_1A ("cntp_ctl_el02", CPENC (3,5,C14,C2,1), 0),
SR_CORE ("cntp_cval_el0", CPENC (3,3,C14,C2,2), 0),
- SR_V8_1 ("cntp_cval_el02", CPENC (3,5,C14,C2,2), 0),
+ SR_V8_1A ("cntp_cval_el02", CPENC (3,5,C14,C2,2), 0),
SR_CORE ("cntv_tval_el0", CPENC (3,3,C14,C3,0), 0),
- SR_V8_1 ("cntv_tval_el02", CPENC (3,5,C14,C3,0), 0),
+ SR_V8_1A ("cntv_tval_el02", CPENC (3,5,C14,C3,0), 0),
SR_CORE ("cntv_ctl_el0", CPENC (3,3,C14,C3,1), 0),
- SR_V8_1 ("cntv_ctl_el02", CPENC (3,5,C14,C3,1), 0),
+ SR_V8_1A ("cntv_ctl_el02", CPENC (3,5,C14,C3,1), 0),
SR_CORE ("cntv_cval_el0", CPENC (3,3,C14,C3,2), 0),
- SR_V8_1 ("cntv_cval_el02", CPENC (3,5,C14,C3,2), 0),
+ SR_V8_1A ("cntv_cval_el02", CPENC (3,5,C14,C3,2), 0),
SR_CORE ("cnthp_tval_el2", CPENC (3,4,C14,C2,0), 0),
SR_CORE ("cnthp_ctl_el2", CPENC (3,4,C14,C2,1), 0),
SR_CORE ("cnthp_cval_el2", CPENC (3,4,C14,C2,2), 0),
SR_CORE ("cntps_tval_el1", CPENC (3,7,C14,C2,0), 0),
SR_CORE ("cntps_ctl_el1", CPENC (3,7,C14,C2,1), 0),
SR_CORE ("cntps_cval_el1", CPENC (3,7,C14,C2,2), 0),
- SR_V8_1 ("cnthv_tval_el2", CPENC (3,4,C14,C3,0), 0),
- SR_V8_1 ("cnthv_ctl_el2", CPENC (3,4,C14,C3,1), 0),
- SR_V8_1 ("cnthv_cval_el2", CPENC (3,4,C14,C3,2), 0),
+ SR_V8_1A ("cnthv_tval_el2", CPENC (3,4,C14,C3,0), 0),
+ SR_V8_1A ("cnthv_ctl_el2", CPENC (3,4,C14,C3,1), 0),
+ SR_V8_1A ("cnthv_cval_el2", CPENC (3,4,C14,C3,2), 0),
SR_CORE ("dacr32_el2", CPENC (3,4,C3,C0,0), 0),
SR_CORE ("ifsr32_el2", CPENC (3,4,C5,C0,1), 0),
SR_CORE ("teehbr32_el1", CPENC (2,2,C1,C0,0), 0),
SR_CORE ("pmevtyper30_el0", CPENC (3,3,C14,C15,6), 0),
SR_CORE ("pmccfiltr_el0", CPENC (3,3,C14,C15,7), 0),
- SR_V8_4 ("dit", CPEN_ (3,C2,5), 0),
- SR_V8_4 ("trfcr_el1", CPENC (3,0,C1,C2,1), 0),
- SR_V8_4 ("pmmir_el1", CPENC (3,0,C9,C14,6), F_REG_READ),
- SR_V8_4 ("trfcr_el2", CPENC (3,4,C1,C2,1), 0),
- SR_V8_4 ("vstcr_el2", CPENC (3,4,C2,C6,2), 0),
+ SR_V8_4A ("dit", CPEN_ (3,C2,5), 0),
+ SR_V8_4A ("trfcr_el1", CPENC (3,0,C1,C2,1), 0),
+ SR_V8_4A ("pmmir_el1", CPENC (3,0,C9,C14,6), F_REG_READ),
+ SR_V8_4A ("trfcr_el2", CPENC (3,4,C1,C2,1), 0),
+ SR_V8_4A ("vstcr_el2", CPENC (3,4,C2,C6,2), 0),
SR_V8_4_A ("vsttbr_el2", CPENC (3,4,C2,C6,0), 0),
- SR_V8_4 ("cnthvs_tval_el2", CPENC (3,4,C14,C4,0), 0),
- SR_V8_4 ("cnthvs_cval_el2", CPENC (3,4,C14,C4,2), 0),
- SR_V8_4 ("cnthvs_ctl_el2", CPENC (3,4,C14,C4,1), 0),
- SR_V8_4 ("cnthps_tval_el2", CPENC (3,4,C14,C5,0), 0),
- SR_V8_4 ("cnthps_cval_el2", CPENC (3,4,C14,C5,2), 0),
- SR_V8_4 ("cnthps_ctl_el2", CPENC (3,4,C14,C5,1), 0),
- SR_V8_4 ("sder32_el2", CPENC (3,4,C1,C3,1), 0),
- SR_V8_4 ("vncr_el2", CPENC (3,4,C2,C2,0), 0),
- SR_V8_4 ("trfcr_el12", CPENC (3,5,C1,C2,1), 0),
+ SR_V8_4A ("cnthvs_tval_el2", CPENC (3,4,C14,C4,0), 0),
+ SR_V8_4A ("cnthvs_cval_el2", CPENC (3,4,C14,C4,2), 0),
+ SR_V8_4A ("cnthvs_ctl_el2", CPENC (3,4,C14,C4,1), 0),
+ SR_V8_4A ("cnthps_tval_el2", CPENC (3,4,C14,C5,0), 0),
+ SR_V8_4A ("cnthps_cval_el2", CPENC (3,4,C14,C5,2), 0),
+ SR_V8_4A ("cnthps_ctl_el2", CPENC (3,4,C14,C5,1), 0),
+ SR_V8_4A ("sder32_el2", CPENC (3,4,C1,C3,1), 0),
+ SR_V8_4A ("vncr_el2", CPENC (3,4,C2,C2,0), 0),
+ SR_V8_4A ("trfcr_el12", CPENC (3,5,C1,C2,1), 0),
SR_CORE ("mpam0_el1", CPENC (3,0,C10,C5,1), 0),
SR_CORE ("mpam1_el1", CPENC (3,0,C10,C5,0), 0),
SR_CORE ("mpamvpm7_el2", CPENC (3,4,C10,C6,7), 0),
SR_CORE ("mpamvpmv_el2", CPENC (3,4,C10,C4,1), 0),
- SR_V8_R ("mpuir_el1", CPENC (3,0,C0,C0,4), F_REG_READ),
- SR_V8_R ("mpuir_el2", CPENC (3,4,C0,C0,4), F_REG_READ),
- SR_V8_R ("prbar_el1", CPENC (3,0,C6,C8,0), 0),
- SR_V8_R ("prbar_el2", CPENC (3,4,C6,C8,0), 0),
+ SR_V8R ("mpuir_el1", CPENC (3,0,C0,C0,4), F_REG_READ),
+ SR_V8R ("mpuir_el2", CPENC (3,4,C0,C0,4), F_REG_READ),
+ SR_V8R ("prbar_el1", CPENC (3,0,C6,C8,0), 0),
+ SR_V8R ("prbar_el2", CPENC (3,4,C6,C8,0), 0),
#define ENC_BARLAR(x,n,lar) \
CPENC (3, (x-1) << 2, C6, 8 | (n >> 1), ((n & 1) << 2) | lar)
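/* For example, ENC_BARLAR (1, 1, 0) evaluates to the same encoding as
   CPENC (3, 0, C6, C8, 4), used for prbar1_el1, and ENC_BARLAR (2, 1, 1)
   to CPENC (3, 4, C6, C8, 5), used for prlar1_el2.  */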
-#define PRBARn_ELx(x,n) SR_V8_R ("prbar" #n "_el" #x, ENC_BARLAR (x,n,0), 0)
-#define PRLARn_ELx(x,n) SR_V8_R ("prlar" #n "_el" #x, ENC_BARLAR (x,n,1), 0)
+#define PRBARn_ELx(x,n) SR_V8R ("prbar" #n "_el" #x, ENC_BARLAR (x,n,0), 0)
+#define PRLARn_ELx(x,n) SR_V8R ("prlar" #n "_el" #x, ENC_BARLAR (x,n,1), 0)
SR_EXPAND_EL12 (PRBARn_ELx)
- SR_V8_R ("prenr_el1", CPENC (3,0,C6,C1,1), 0),
- SR_V8_R ("prenr_el2", CPENC (3,4,C6,C1,1), 0),
- SR_V8_R ("prlar_el1", CPENC (3,0,C6,C8,1), 0),
- SR_V8_R ("prlar_el2", CPENC (3,4,C6,C8,1), 0),
+ SR_V8R ("prenr_el1", CPENC (3,0,C6,C1,1), 0),
+ SR_V8R ("prenr_el2", CPENC (3,4,C6,C1,1), 0),
+ SR_V8R ("prlar_el1", CPENC (3,0,C6,C8,1), 0),
+ SR_V8R ("prlar_el2", CPENC (3,4,C6,C8,1), 0),
SR_EXPAND_EL12 (PRLARn_ELx)
- SR_V8_R ("prselr_el1", CPENC (3,0,C6,C2,1), 0),
- SR_V8_R ("prselr_el2", CPENC (3,4,C6,C2,1), 0),
- SR_V8_R ("vsctlr_el2", CPENC (3,4,C2,C0,0), 0),
+ SR_V8R ("prselr_el1", CPENC (3,0,C6,C2,1), 0),
+ SR_V8R ("prselr_el2", CPENC (3,4,C6,C2,1), 0),
+ SR_V8R ("vsctlr_el2", CPENC (3,4,C2,C0,0), 0),
SR_CORE("trbbaser_el1", CPENC (3,0,C9,C11,2), 0),
SR_CORE("trbidr_el1", CPENC (3,0,C9,C11,7), F_REG_READ),
SR_GIC ("ich_lr15_el2", CPENC (3,4,C12,C13,7), 0),
SR_GIC ("icc_igrpen1_el3", CPENC (3,6,C12,C12,7), 0),
- SR_V8_6 ("amcg1idr_el0", CPENC (3,3,C13,C2,6), F_REG_READ),
- SR_V8_6 ("cntpctss_el0", CPENC (3,3,C14,C0,5), F_REG_READ),
- SR_V8_6 ("cntvctss_el0", CPENC (3,3,C14,C0,6), F_REG_READ),
- SR_V8_6 ("hfgrtr_el2", CPENC (3,4,C1,C1,4), 0),
- SR_V8_6 ("hfgwtr_el2", CPENC (3,4,C1,C1,5), 0),
- SR_V8_6 ("hfgitr_el2", CPENC (3,4,C1,C1,6), 0),
- SR_V8_6 ("hdfgrtr_el2", CPENC (3,4,C3,C1,4), 0),
- SR_V8_6 ("hdfgwtr_el2", CPENC (3,4,C3,C1,5), 0),
- SR_V8_6 ("hafgrtr_el2", CPENC (3,4,C3,C1,6), 0),
- SR_V8_6 ("amevcntvoff00_el2", CPENC (3,4,C13,C8,0), 0),
- SR_V8_6 ("amevcntvoff01_el2", CPENC (3,4,C13,C8,1), 0),
- SR_V8_6 ("amevcntvoff02_el2", CPENC (3,4,C13,C8,2), 0),
- SR_V8_6 ("amevcntvoff03_el2", CPENC (3,4,C13,C8,3), 0),
- SR_V8_6 ("amevcntvoff04_el2", CPENC (3,4,C13,C8,4), 0),
- SR_V8_6 ("amevcntvoff05_el2", CPENC (3,4,C13,C8,5), 0),
- SR_V8_6 ("amevcntvoff06_el2", CPENC (3,4,C13,C8,6), 0),
- SR_V8_6 ("amevcntvoff07_el2", CPENC (3,4,C13,C8,7), 0),
- SR_V8_6 ("amevcntvoff08_el2", CPENC (3,4,C13,C9,0), 0),
- SR_V8_6 ("amevcntvoff09_el2", CPENC (3,4,C13,C9,1), 0),
- SR_V8_6 ("amevcntvoff010_el2", CPENC (3,4,C13,C9,2), 0),
- SR_V8_6 ("amevcntvoff011_el2", CPENC (3,4,C13,C9,3), 0),
- SR_V8_6 ("amevcntvoff012_el2", CPENC (3,4,C13,C9,4), 0),
- SR_V8_6 ("amevcntvoff013_el2", CPENC (3,4,C13,C9,5), 0),
- SR_V8_6 ("amevcntvoff014_el2", CPENC (3,4,C13,C9,6), 0),
- SR_V8_6 ("amevcntvoff015_el2", CPENC (3,4,C13,C9,7), 0),
- SR_V8_6 ("amevcntvoff10_el2", CPENC (3,4,C13,C10,0), 0),
- SR_V8_6 ("amevcntvoff11_el2", CPENC (3,4,C13,C10,1), 0),
- SR_V8_6 ("amevcntvoff12_el2", CPENC (3,4,C13,C10,2), 0),
- SR_V8_6 ("amevcntvoff13_el2", CPENC (3,4,C13,C10,3), 0),
- SR_V8_6 ("amevcntvoff14_el2", CPENC (3,4,C13,C10,4), 0),
- SR_V8_6 ("amevcntvoff15_el2", CPENC (3,4,C13,C10,5), 0),
- SR_V8_6 ("amevcntvoff16_el2", CPENC (3,4,C13,C10,6), 0),
- SR_V8_6 ("amevcntvoff17_el2", CPENC (3,4,C13,C10,7), 0),
- SR_V8_6 ("amevcntvoff18_el2", CPENC (3,4,C13,C11,0), 0),
- SR_V8_6 ("amevcntvoff19_el2", CPENC (3,4,C13,C11,1), 0),
- SR_V8_6 ("amevcntvoff110_el2", CPENC (3,4,C13,C11,2), 0),
- SR_V8_6 ("amevcntvoff111_el2", CPENC (3,4,C13,C11,3), 0),
- SR_V8_6 ("amevcntvoff112_el2", CPENC (3,4,C13,C11,4), 0),
- SR_V8_6 ("amevcntvoff113_el2", CPENC (3,4,C13,C11,5), 0),
- SR_V8_6 ("amevcntvoff114_el2", CPENC (3,4,C13,C11,6), 0),
- SR_V8_6 ("amevcntvoff115_el2", CPENC (3,4,C13,C11,7), 0),
- SR_V8_6 ("cntpoff_el2", CPENC (3,4,C14,C0,6), 0),
-
- SR_V8_7 ("pmsnevfr_el1", CPENC (3,0,C9,C9,1), 0),
- SR_V8_7 ("hcrx_el2", CPENC (3,4,C1,C2,2), 0),
-
- SR_V8_8 ("allint", CPENC (3,0,C4,C3,0), 0),
- SR_V8_8 ("icc_nmiar1_el1", CPENC (3,0,C12,C9,5), F_REG_READ),
+ SR_V8_6A ("amcg1idr_el0", CPENC (3,3,C13,C2,6), F_REG_READ),
+ SR_V8_6A ("cntpctss_el0", CPENC (3,3,C14,C0,5), F_REG_READ),
+ SR_V8_6A ("cntvctss_el0", CPENC (3,3,C14,C0,6), F_REG_READ),
+ SR_V8_6A ("hfgrtr_el2", CPENC (3,4,C1,C1,4), 0),
+ SR_V8_6A ("hfgwtr_el2", CPENC (3,4,C1,C1,5), 0),
+ SR_V8_6A ("hfgitr_el2", CPENC (3,4,C1,C1,6), 0),
+ SR_V8_6A ("hdfgrtr_el2", CPENC (3,4,C3,C1,4), 0),
+ SR_V8_6A ("hdfgwtr_el2", CPENC (3,4,C3,C1,5), 0),
+ SR_V8_6A ("hafgrtr_el2", CPENC (3,4,C3,C1,6), 0),
+ SR_V8_6A ("amevcntvoff00_el2", CPENC (3,4,C13,C8,0), 0),
+ SR_V8_6A ("amevcntvoff01_el2", CPENC (3,4,C13,C8,1), 0),
+ SR_V8_6A ("amevcntvoff02_el2", CPENC (3,4,C13,C8,2), 0),
+ SR_V8_6A ("amevcntvoff03_el2", CPENC (3,4,C13,C8,3), 0),
+ SR_V8_6A ("amevcntvoff04_el2", CPENC (3,4,C13,C8,4), 0),
+ SR_V8_6A ("amevcntvoff05_el2", CPENC (3,4,C13,C8,5), 0),
+ SR_V8_6A ("amevcntvoff06_el2", CPENC (3,4,C13,C8,6), 0),
+ SR_V8_6A ("amevcntvoff07_el2", CPENC (3,4,C13,C8,7), 0),
+ SR_V8_6A ("amevcntvoff08_el2", CPENC (3,4,C13,C9,0), 0),
+ SR_V8_6A ("amevcntvoff09_el2", CPENC (3,4,C13,C9,1), 0),
+ SR_V8_6A ("amevcntvoff010_el2", CPENC (3,4,C13,C9,2), 0),
+ SR_V8_6A ("amevcntvoff011_el2", CPENC (3,4,C13,C9,3), 0),
+ SR_V8_6A ("amevcntvoff012_el2", CPENC (3,4,C13,C9,4), 0),
+ SR_V8_6A ("amevcntvoff013_el2", CPENC (3,4,C13,C9,5), 0),
+ SR_V8_6A ("amevcntvoff014_el2", CPENC (3,4,C13,C9,6), 0),
+ SR_V8_6A ("amevcntvoff015_el2", CPENC (3,4,C13,C9,7), 0),
+ SR_V8_6A ("amevcntvoff10_el2", CPENC (3,4,C13,C10,0), 0),
+ SR_V8_6A ("amevcntvoff11_el2", CPENC (3,4,C13,C10,1), 0),
+ SR_V8_6A ("amevcntvoff12_el2", CPENC (3,4,C13,C10,2), 0),
+ SR_V8_6A ("amevcntvoff13_el2", CPENC (3,4,C13,C10,3), 0),
+ SR_V8_6A ("amevcntvoff14_el2", CPENC (3,4,C13,C10,4), 0),
+ SR_V8_6A ("amevcntvoff15_el2", CPENC (3,4,C13,C10,5), 0),
+ SR_V8_6A ("amevcntvoff16_el2", CPENC (3,4,C13,C10,6), 0),
+ SR_V8_6A ("amevcntvoff17_el2", CPENC (3,4,C13,C10,7), 0),
+ SR_V8_6A ("amevcntvoff18_el2", CPENC (3,4,C13,C11,0), 0),
+ SR_V8_6A ("amevcntvoff19_el2", CPENC (3,4,C13,C11,1), 0),
+ SR_V8_6A ("amevcntvoff110_el2", CPENC (3,4,C13,C11,2), 0),
+ SR_V8_6A ("amevcntvoff111_el2", CPENC (3,4,C13,C11,3), 0),
+ SR_V8_6A ("amevcntvoff112_el2", CPENC (3,4,C13,C11,4), 0),
+ SR_V8_6A ("amevcntvoff113_el2", CPENC (3,4,C13,C11,5), 0),
+ SR_V8_6A ("amevcntvoff114_el2", CPENC (3,4,C13,C11,6), 0),
+ SR_V8_6A ("amevcntvoff115_el2", CPENC (3,4,C13,C11,7), 0),
+ SR_V8_6A ("cntpoff_el2", CPENC (3,4,C14,C0,6), 0),
+
+ SR_V8_7A ("pmsnevfr_el1", CPENC (3,0,C9,C9,1), 0),
+ SR_V8_7A ("hcrx_el2", CPENC (3,4,C1,C2,2), 0),
+
+ SR_V8_8A ("allint", CPENC (3,0,C4,C3,0), 0),
+ SR_V8_8A ("icc_nmiar1_el1", CPENC (3,0,C12,C9,5), F_REG_READ),
{ 0, CPENC (0,0,0,0,0), 0, 0 }
};
SR_CORE ("daifset", 0x1e, F_REG_MAX_VALUE (15)),
SR_CORE ("daifclr", 0x1f, F_REG_MAX_VALUE (15)),
SR_PAN ("pan", 0x04, F_REG_MAX_VALUE (1)),
- SR_V8_2 ("uao", 0x03, F_REG_MAX_VALUE (1)),
+ SR_V8_2A ("uao", 0x03, F_REG_MAX_VALUE (1)),
SR_SSBS ("ssbs", 0x19, F_REG_MAX_VALUE (1)),
- SR_V8_4 ("dit", 0x1a, F_REG_MAX_VALUE (1)),
+ SR_V8_4A ("dit", 0x1a, F_REG_MAX_VALUE (1)),
SR_MEMTAG ("tco", 0x1c, F_REG_MAX_VALUE (1)),
SR_SME ("svcrsm", 0x1b, PSTATE_ENCODE_CRM_AND_IMM(0x2,0x1)
| F_REG_MAX_VALUE (1)),
| F_REG_MAX_VALUE (1)),
SR_SME ("svcrsmza", 0x1b, PSTATE_ENCODE_CRM_AND_IMM(0x6,0x1)
| F_REG_MAX_VALUE (1)),
- SR_V8_8 ("allint", 0x08, F_REG_MAX_VALUE (1)),
+ SR_V8_8A ("allint", 0x08, F_REG_MAX_VALUE (1)),
{ 0, CPENC (0,0,0,0,0), 0, 0 },
};
aarch64_feature_set reg_features)
{
/* Armv8-R has no EL3. */
- if (AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_R))
+ if (AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8R))
{
const char *suffix = strrchr (reg_name, '_');
if (suffix && !strcmp (suffix, "_el3"))
|| reg_value == CPENS (6, C8, C2, 5)
|| reg_value == CPENS (6, C8, C5, 1)
|| reg_value == CPENS (6, C8, C5, 5))
- && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_4))
+ && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_4A))
return true;
/* DC CVAP. Values are from aarch64_sys_regs_dc. */
if (reg_value == CPENS (3, C7, C12, 1)
- && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_2))
+ && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_2A))
return true;
/* DC CVADP. Values are from aarch64_sys_regs_dc. */
/* AT S1E1RP, AT S1E1WP. Values are from aarch64_sys_regs_at. */
if ((reg_value == CPENS (0, C7, C9, 0)
|| reg_value == CPENS (0, C7, C9, 1))
- && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_2))
+ && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_2A))
return true;
  /* CFP/DVP/CPP RCTX: Values are from aarch64_sys_regs_sr.  */
AARCH64_FEATURE (AARCH64_FEATURE_LOR, 0);
static const aarch64_feature_set aarch64_feature_rdma =
AARCH64_FEATURE (AARCH64_FEATURE_RDMA, 0);
-static const aarch64_feature_set aarch64_feature_v8_2 =
- AARCH64_FEATURE (AARCH64_FEATURE_V8_2, 0);
+static const aarch64_feature_set aarch64_feature_v8_2a =
+ AARCH64_FEATURE (AARCH64_FEATURE_V8_2A, 0);
static const aarch64_feature_set aarch64_feature_fp_f16 =
AARCH64_FEATURE (AARCH64_FEATURE_F16 | AARCH64_FEATURE_FP, 0);
static const aarch64_feature_set aarch64_feature_simd_f16 =
AARCH64_FEATURE (AARCH64_FEATURE_F16 | AARCH64_FEATURE_SIMD, 0);
static const aarch64_feature_set aarch64_feature_sve =
AARCH64_FEATURE (AARCH64_FEATURE_SVE, 0);
-static const aarch64_feature_set aarch64_feature_v8_3 =
- AARCH64_FEATURE (AARCH64_FEATURE_V8_3, 0);
-static const aarch64_feature_set aarch64_feature_fp_v8_3 =
- AARCH64_FEATURE (AARCH64_FEATURE_V8_3 | AARCH64_FEATURE_FP, 0);
+static const aarch64_feature_set aarch64_feature_v8_3a =
+ AARCH64_FEATURE (AARCH64_FEATURE_V8_3A, 0);
+static const aarch64_feature_set aarch64_feature_fp_v8_3a =
+ AARCH64_FEATURE (AARCH64_FEATURE_V8_3A | AARCH64_FEATURE_FP, 0);
static const aarch64_feature_set aarch64_feature_pac =
AARCH64_FEATURE (AARCH64_FEATURE_PAC, 0);
static const aarch64_feature_set aarch64_feature_compnum =
AARCH64_FEATURE (AARCH64_FEATURE_V8 | AARCH64_FEATURE_SHA2, 0);
static const aarch64_feature_set aarch64_feature_aes =
AARCH64_FEATURE (AARCH64_FEATURE_V8 | AARCH64_FEATURE_AES, 0);
-static const aarch64_feature_set aarch64_feature_v8_4 =
- AARCH64_FEATURE (AARCH64_FEATURE_V8_4, 0);
+static const aarch64_feature_set aarch64_feature_v8_4a =
+ AARCH64_FEATURE (AARCH64_FEATURE_V8_4A, 0);
static const aarch64_feature_set aarch64_feature_sm4 =
AARCH64_FEATURE (AARCH64_FEATURE_SM4 | AARCH64_FEATURE_SIMD
| AARCH64_FEATURE_FP, 0);
static const aarch64_feature_set aarch64_feature_sha3 =
AARCH64_FEATURE (AARCH64_FEATURE_SHA2 | AARCH64_FEATURE_SHA3
| AARCH64_FEATURE_SIMD | AARCH64_FEATURE_FP, 0);
-static const aarch64_feature_set aarch64_feature_fp_16_v8_2 =
+static const aarch64_feature_set aarch64_feature_fp_16_v8_2a =
AARCH64_FEATURE (AARCH64_FEATURE_F16_FML | AARCH64_FEATURE_F16
| AARCH64_FEATURE_FP, 0);
-static const aarch64_feature_set aarch64_feature_v8_5 =
- AARCH64_FEATURE (AARCH64_FEATURE_V8_5, 0);
+static const aarch64_feature_set aarch64_feature_v8_5a =
+ AARCH64_FEATURE (AARCH64_FEATURE_V8_5A, 0);
static const aarch64_feature_set aarch64_feature_flagmanip =
AARCH64_FEATURE (AARCH64_FEATURE_FLAGMANIP, 0);
static const aarch64_feature_set aarch64_feature_frintts =
AARCH64_FEATURE (AARCH64_FEATURE_SME2 | AARCH64_FEATURE_SME_I16I64, 0);
static const aarch64_feature_set aarch64_feature_sme2_f64f64 =
AARCH64_FEATURE (AARCH64_FEATURE_SME2 | AARCH64_FEATURE_SME_F64F64, 0);
-static const aarch64_feature_set aarch64_feature_v8_6 =
- AARCH64_FEATURE (AARCH64_FEATURE_V8_6, 0);
-static const aarch64_feature_set aarch64_feature_v8_7 =
- AARCH64_FEATURE (AARCH64_FEATURE_V8_7, 0);
+static const aarch64_feature_set aarch64_feature_v8_6a =
+ AARCH64_FEATURE (AARCH64_FEATURE_V8_6A, 0);
+static const aarch64_feature_set aarch64_feature_v8_7a =
+ AARCH64_FEATURE (AARCH64_FEATURE_V8_7A, 0);
static const aarch64_feature_set aarch64_feature_i8mm =
AARCH64_FEATURE (AARCH64_FEATURE_I8MM, 0);
static const aarch64_feature_set aarch64_feature_i8mm_sve =
AARCH64_FEATURE (AARCH64_FEATURE_F32MM | AARCH64_FEATURE_SVE, 0);
static const aarch64_feature_set aarch64_feature_f64mm_sve =
AARCH64_FEATURE (AARCH64_FEATURE_F64MM | AARCH64_FEATURE_SVE, 0);
-static const aarch64_feature_set aarch64_feature_v8_r =
- AARCH64_FEATURE (AARCH64_FEATURE_V8_R, 0);
+static const aarch64_feature_set aarch64_feature_v8r =
+ AARCH64_FEATURE (AARCH64_FEATURE_V8R, 0);
static const aarch64_feature_set aarch64_feature_ls64 =
AARCH64_FEATURE (AARCH64_FEATURE_LS64, 0);
static const aarch64_feature_set aarch64_feature_flagm =
#define RDMA &aarch64_feature_rdma
#define FP_F16 &aarch64_feature_fp_f16
#define SIMD_F16 &aarch64_feature_simd_f16
-#define ARMV8_2 &aarch64_feature_v8_2
+#define ARMV8_2A &aarch64_feature_v8_2a
#define SVE &aarch64_feature_sve
-#define ARMV8_3 &aarch64_feature_v8_3
-#define FP_V8_3 &aarch64_feature_fp_v8_3
+#define ARMV8_3A &aarch64_feature_v8_3a
+#define FP_V8_3A &aarch64_feature_fp_v8_3a
#define PAC &aarch64_feature_pac
#define COMPNUM &aarch64_feature_compnum
#define RCPC &aarch64_feature_rcpc
#define SHA2 &aarch64_feature_sha2
#define AES &aarch64_feature_aes
-#define ARMV8_4 &aarch64_feature_v8_4
+#define ARMV8_4A &aarch64_feature_v8_4a
#define SHA3 &aarch64_feature_sha3
#define SM4 &aarch64_feature_sm4
-#define FP_F16_V8_2 &aarch64_feature_fp_16_v8_2
+#define FP_F16_V8_2A &aarch64_feature_fp_16_v8_2a
#define DOTPROD &aarch64_feature_dotprod
-#define ARMV8_5 &aarch64_feature_v8_5
+#define ARMV8_5A &aarch64_feature_v8_5a
#define FLAGMANIP &aarch64_feature_flagmanip
#define FRINTTS &aarch64_feature_frintts
#define SB &aarch64_feature_sb
#define SME2 &aarch64_feature_sme2
#define SME2_I16I64 &aarch64_feature_sme2_i16i64
#define SME2_F64F64 &aarch64_feature_sme2_f64f64
-#define ARMV8_6 &aarch64_feature_v8_6
-#define ARMV8_6_SVE &aarch64_feature_v8_6
+#define ARMV8_6A &aarch64_feature_v8_6a
+#define ARMV8_6A_SVE &aarch64_feature_v8_6a
#define BFLOAT16_SVE &aarch64_feature_bfloat16_sve
#define BFLOAT16 &aarch64_feature_bfloat16
#define I8MM_SVE &aarch64_feature_i8mm_sve
#define F32MM_SVE &aarch64_feature_f32mm_sve
#define F64MM_SVE &aarch64_feature_f64mm_sve
#define I8MM &aarch64_feature_i8mm
-#define ARMV8_R &aarch64_feature_v8_r
-#define ARMV8_7 &aarch64_feature_v8_7
+#define ARMV8R &aarch64_feature_v8r
+#define ARMV8_7A &aarch64_feature_v8_7a
#define LS64 &aarch64_feature_ls64
#define FLAGM &aarch64_feature_flagm
#define MOPS &aarch64_feature_mops
{ NAME, OPCODE, MASK, CLASS, 0, FP_F16, OPS, QUALS, FLAGS, 0, 0, NULL }
#define SF16_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, SIMD_F16, OPS, QUALS, FLAGS, 0, 0, NULL }
-#define V8_2_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS) \
- { NAME, OPCODE, MASK, CLASS, OP, ARMV8_2, OPS, QUALS, FLAGS, 0, 0, NULL }
+#define V8_2A_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS) \
+ { NAME, OPCODE, MASK, CLASS, OP, ARMV8_2A, OPS, QUALS, FLAGS, 0, 0, NULL }
#define _SVE_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS,TIED) \
{ NAME, OPCODE, MASK, CLASS, OP, SVE, OPS, QUALS, \
FLAGS | F_STRICT, 0, TIED, NULL }
#define _SVE_INSNC(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS,CONSTRAINTS,TIED) \
{ NAME, OPCODE, MASK, CLASS, OP, SVE, OPS, QUALS, \
FLAGS | F_STRICT, CONSTRAINTS, TIED, NULL }
-#define V8_3_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
- { NAME, OPCODE, MASK, CLASS, 0, ARMV8_3, OPS, QUALS, FLAGS, 0, 0, NULL }
+#define V8_3A_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+ { NAME, OPCODE, MASK, CLASS, 0, ARMV8_3A, OPS, QUALS, FLAGS, 0, 0, NULL }
#define PAC_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, PAC, OPS, QUALS, FLAGS, 0, 0, NULL }
#define CNUM_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, SHA2, OPS, QUALS, FLAGS, 0, 0, NULL }
#define AES_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, AES, OPS, QUALS, FLAGS, 0, 0, NULL }
-#define V8_4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
- { NAME, OPCODE, MASK, CLASS, 0, ARMV8_4, OPS, QUALS, FLAGS, 0, 0, NULL }
+#define V8_4A_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+ { NAME, OPCODE, MASK, CLASS, 0, ARMV8_4A, OPS, QUALS, FLAGS, 0, 0, NULL }
#define SHA3_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, SHA3, OPS, QUALS, FLAGS, 0, 0, NULL }
#define SM4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, SM4, OPS, QUALS, FLAGS, 0, 0, NULL }
-#define FP16_V8_2_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
- { NAME, OPCODE, MASK, CLASS, 0, FP_F16_V8_2, OPS, QUALS, FLAGS, 0, 0, NULL }
+#define FP16_V8_2A_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+ { NAME, OPCODE, MASK, CLASS, 0, FP_F16_V8_2A, OPS, QUALS, FLAGS, 0, 0, NULL }
#define DOT_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, DOTPROD, OPS, QUALS, FLAGS, 0, 0, NULL }
-#define V8_5_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
- { NAME, OPCODE, MASK, CLASS, 0, ARMV8_5, OPS, QUALS, FLAGS, 0, 0, NULL }
+#define V8_5A_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+ { NAME, OPCODE, MASK, CLASS, 0, ARMV8_5A, OPS, QUALS, FLAGS, 0, 0, NULL }
#define FLAGMANIP_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, FLAGMANIP, OPS, QUALS, FLAGS, 0, 0, NULL }
#define FRINTTS_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
#define SVE2BITPERM_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS,TIED) \
{ NAME, OPCODE, MASK, CLASS, OP, SVE2_BITPERM, OPS, QUALS, \
FLAGS | F_STRICT, 0, TIED, NULL }
-#define V8_6_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
- { NAME, OPCODE, MASK, CLASS, 0, ARMV8_6, OPS, QUALS, FLAGS, 0, 0, NULL }
+#define V8_6A_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+ { NAME, OPCODE, MASK, CLASS, 0, ARMV8_6A, OPS, QUALS, FLAGS, 0, 0, NULL }
#define BFLOAT16_SVE_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, BFLOAT16_SVE, OPS, QUALS, FLAGS, 0, 0, NULL }
#define BFLOAT16_SVE_INSNC(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS, CONSTRAINTS, TIED) \
{ NAME, OPCODE, MASK, CLASS, 0, F64MM_SVE, OPS, QUALS, FLAGS, CONSTRAINTS, TIED, NULL }
#define F32MATMUL_SVE_INSNC(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS, CONSTRAINTS, TIED) \
{ NAME, OPCODE, MASK, CLASS, 0, F32MM_SVE, OPS, QUALS, FLAGS, CONSTRAINTS, TIED, NULL }
-#define V8_R_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
- { NAME, OPCODE, MASK, CLASS, 0, ARMV8_R, OPS, QUALS, FLAGS, 0, 0, NULL }
-#define V8_7_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
- { NAME, OPCODE, MASK, CLASS, 0, ARMV8_7, OPS, QUALS, FLAGS, 0, 0, NULL }
+#define V8R_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+ { NAME, OPCODE, MASK, CLASS, 0, ARMV8R, OPS, QUALS, FLAGS, 0, 0, NULL }
+#define V8_7A_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+ { NAME, OPCODE, MASK, CLASS, 0, ARMV8_7A, OPS, QUALS, FLAGS, 0, 0, NULL }
#define _LS64_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
{ NAME, OPCODE, MASK, CLASS, 0, LS64, OPS, QUALS, FLAGS, 0, 0, NULL }
#define FLAGM_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
CORE_INSN ("asr", 0x13000000, 0x7f800000, bitfield, OP_ASR_IMM, OP3 (Rd, Rn, IMM), QL_SHIFT, F_ALIAS | F_P2 | F_CONV),
CORE_INSN ("bfm", 0x33000000, 0x7f800000, bitfield, 0, OP4 (Rd, Rn, IMMR, IMMS), QL_BF, F_HAS_ALIAS | F_SF | F_N),
CORE_INSN ("bfi", 0x33000000, 0x7f800000, bitfield, OP_BFI, OP4 (Rd, Rn, IMM, WIDTH), QL_BF2, F_ALIAS | F_P1 | F_CONV),
- V8_2_INSN ("bfc", 0x330003e0, 0x7f8003e0, bitfield, OP_BFC, OP3 (Rd, IMM, WIDTH), QL_BF1, F_ALIAS | F_P2 | F_CONV),
+ V8_2A_INSN ("bfc", 0x330003e0, 0x7f8003e0, bitfield, OP_BFC, OP3 (Rd, IMM, WIDTH), QL_BF1, F_ALIAS | F_P2 | F_CONV),
CORE_INSN ("bfxil", 0x33000000, 0x7f800000, bitfield, OP_BFXIL, OP4 (Rd, Rn, IMM, WIDTH), QL_BF2, F_ALIAS | F_P1 | F_CONV),
CORE_INSN ("ubfm", 0x53000000, 0x7f800000, bitfield, 0, OP4 (Rd, Rn, IMMR, IMMS), QL_BF, F_HAS_ALIAS | F_SF | F_N),
CORE_INSN ("ubfiz", 0x53000000, 0x7f800000, bitfield, OP_UBFIZ, OP4 (Rd, Rn, IMM, WIDTH), QL_BF2, F_ALIAS | F_P1 | F_CONV),
CORE_INSN ("rev16", 0x5ac00400, 0x7ffffc00, dp_1src, 0, OP2 (Rd, Rn), QL_I2SAME, F_SF),
CORE_INSN ("rev", 0x5ac00800, 0xfffffc00, dp_1src, 0, OP2 (Rd, Rn), QL_I2SAMEW, 0),
CORE_INSN ("rev", 0xdac00c00, 0xfffffc00, dp_1src, 0, OP2 (Rd, Rn), QL_I2SAMEX, F_SF | F_HAS_ALIAS | F_P1),
- V8_2_INSN ("rev64", 0xdac00c00, 0xfffffc00, dp_1src, 0, OP2 (Rd, Rn), QL_I2SAMEX, F_SF | F_ALIAS),
+ V8_2A_INSN ("rev64", 0xdac00c00, 0xfffffc00, dp_1src, 0, OP2 (Rd, Rn), QL_I2SAMEX, F_SF | F_ALIAS),
CORE_INSN ("clz", 0x5ac01000, 0x7ffffc00, dp_1src, 0, OP2 (Rd, Rn), QL_I2SAME, F_SF),
CORE_INSN ("cls", 0x5ac01400, 0x7ffffc00, dp_1src, 0, OP2 (Rd, Rn), QL_I2SAME, F_SF),
CORE_INSN ("rev32", 0xdac00800, 0xfffffc00, dp_1src, 0, OP2 (Rd, Rn), QL_I2SAMEX, 0),
FF16_INSN ("fcvtzu",0x1ef90000, 0x7f3ffc00, float2int, OP2 (Rd, Fn), QL_FP2INT_H, F_FPTYPE | F_SF),
__FP_INSN ("fmov", 0x9eae0000, 0xfffffc00, float2int, 0, OP2 (Rd, VnD1), QL_XVD1, 0),
__FP_INSN ("fmov", 0x9eaf0000, 0xfffffc00, float2int, 0, OP2 (VdD1, Rn), QL_VD1X, 0),
- {"fjcvtzs", 0x1e7e0000, 0xfffffc00, float2int, 0, FP_V8_3, OP2 (Rd, Fn), QL_FP2INT_W_D, 0, 0, 0, NULL },
+ {"fjcvtzs", 0x1e7e0000, 0xfffffc00, float2int, 0, FP_V8_3A, OP2 (Rd, Fn), QL_FP2INT_W_D, 0, 0, 0, NULL },
/* Floating-point conditional compare. */
__FP_INSN ("fccmp", 0x1e200400, 0xff200c10, floatccmp, 0, OP4 (Fn, Fm, NZCV, COND), QL_FCCMP, F_FPTYPE),
FF16_INSN ("fccmp", 0x1ee00400, 0xff200c10, floatccmp, OP4 (Fn, Fm, NZCV, COND), QL_FCCMP_H, F_FPTYPE),
CORE_INSN ("clearbhb", 0xd50322df, 0xffffffff, ic_system, 0, OP0 (), {}, F_ALIAS),
CORE_INSN ("clrex", 0xd503305f, 0xfffff0ff, ic_system, 0, OP1 (UIMM4), {}, F_OPD0_OPT | F_DEFAULT (0xF)),
CORE_INSN ("dsb", 0xd503309f, 0xfffff0ff, ic_system, 0, OP1 (BARRIER), {}, F_HAS_ALIAS),
- V8_7_INSN ("dsb", 0xd503323f, 0xfffff3ff, ic_system, OP1 (BARRIER_DSB_NXS), {}, F_HAS_ALIAS),
- V8_R_INSN ("dfb", 0xd5033c9f, 0xffffffff, ic_system, OP0 (), {}, F_ALIAS),
+ V8_7A_INSN ("dsb", 0xd503323f, 0xfffff3ff, ic_system, OP1 (BARRIER_DSB_NXS), {}, F_HAS_ALIAS),
+ V8R_INSN ("dfb", 0xd5033c9f, 0xffffffff, ic_system, OP0 (), {}, F_ALIAS),
CORE_INSN ("ssbb", 0xd503309f, 0xffffffff, ic_system, 0, OP0 (), {}, F_ALIAS),
CORE_INSN ("pssbb", 0xd503349f, 0xffffffff, ic_system, 0, OP0 (), {}, F_ALIAS),
CORE_INSN ("dmb", 0xd50330bf, 0xfffff0ff, ic_system, 0, OP1 (BARRIER), {}, 0),
CORE_INSN ("dc", 0xd5080000, 0xfff80000, ic_system, 0, OP2 (SYSREG_DC, Rt), QL_SRC_X, F_ALIAS),
CORE_INSN ("ic", 0xd5080000, 0xfff80000, ic_system, 0, OP2 (SYSREG_IC, Rt_SYS), QL_SRC_X, F_ALIAS | F_OPD1_OPT | F_DEFAULT (0x1F)),
CORE_INSN ("tlbi",0xd5080000, 0xfff80000, ic_system, 0, OP2 (SYSREG_TLBI, Rt_SYS), QL_SRC_X, F_ALIAS | F_OPD1_OPT | F_DEFAULT (0x1F)),
- V8_7_INSN ("wfet", 0xd5031000, 0xffffffe0, ic_system, OP1 (Rd), QL_I1X, F_HAS_ALIAS),
- V8_7_INSN ("wfit", 0xd5031020, 0xffffffe0, ic_system, OP1 (Rd), QL_I1X, F_HAS_ALIAS),
+ V8_7A_INSN ("wfet", 0xd5031000, 0xffffffe0, ic_system, OP1 (Rd), QL_I1X, F_HAS_ALIAS),
+ V8_7A_INSN ("wfit", 0xd5031020, 0xffffffe0, ic_system, OP1 (Rd), QL_I1X, F_HAS_ALIAS),
PREDRES_INSN ("cfp", 0xd50b7380, 0xffffffe0, ic_system, OP2 (SYSREG_SR, Rt), QL_SRC_X, F_ALIAS),
PREDRES_INSN ("dvp", 0xd50b73a0, 0xffffffe0, ic_system, OP2 (SYSREG_SR, Rt), QL_SRC_X, F_ALIAS),
PREDRES_INSN ("cpp", 0xd50b73e0, 0xffffffe0, ic_system, OP2 (SYSREG_SR, Rt), QL_SRC_X, F_ALIAS),
SM4_INSN ("sm4e", 0xcec08400, 0xfffffc00, cryptosm4, OP2 (Vd, Vn), QL_V2SAME4S, 0),
SM4_INSN ("sm4ekey", 0xce60c800, 0xffe0fc00, cryptosm4, OP3 (Vd, Vn, Vm), QL_V3SAME4S, 0),
/* Crypto FP16 (optional in ARMv8.2-a). */
- FP16_V8_2_INSN ("fmlal", 0xe20ec00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML2S, 0),
- FP16_V8_2_INSN ("fmlsl", 0xea0ec00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML2S, 0),
- FP16_V8_2_INSN ("fmlal2", 0x2e20cc00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML2S, 0),
- FP16_V8_2_INSN ("fmlsl2", 0x2ea0cc00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML2S, 0),
-
- FP16_V8_2_INSN ("fmlal", 0x4e20ec00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML4S, 0),
- FP16_V8_2_INSN ("fmlsl", 0x4ea0ec00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML4S, 0),
- FP16_V8_2_INSN ("fmlal2", 0x6e20cc00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML4S, 0),
- FP16_V8_2_INSN ("fmlsl2", 0x6ea0cc00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML4S, 0),
-
- FP16_V8_2_INSN ("fmlal", 0xf800000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML2S, 0),
- FP16_V8_2_INSN ("fmlsl", 0xf804000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML2S, 0),
- FP16_V8_2_INSN ("fmlal2", 0x2f808000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML2S, 0),
- FP16_V8_2_INSN ("fmlsl2", 0x2f80c000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML2S, 0),
-
- FP16_V8_2_INSN ("fmlal", 0x4f800000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML4S, 0),
- FP16_V8_2_INSN ("fmlsl", 0x4f804000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML4S, 0),
- FP16_V8_2_INSN ("fmlal2", 0x6f808000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML4S, 0),
- FP16_V8_2_INSN ("fmlsl2", 0x6f80c000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML4S, 0),
+ FP16_V8_2A_INSN ("fmlal", 0xe20ec00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML2S, 0),
+ FP16_V8_2A_INSN ("fmlsl", 0xea0ec00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML2S, 0),
+ FP16_V8_2A_INSN ("fmlal2", 0x2e20cc00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML2S, 0),
+ FP16_V8_2A_INSN ("fmlsl2", 0x2ea0cc00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML2S, 0),
+
+ FP16_V8_2A_INSN ("fmlal", 0x4e20ec00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML4S, 0),
+ FP16_V8_2A_INSN ("fmlsl", 0x4ea0ec00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML4S, 0),
+ FP16_V8_2A_INSN ("fmlal2", 0x6e20cc00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML4S, 0),
+ FP16_V8_2A_INSN ("fmlsl2", 0x6ea0cc00, 0xffa0fc00, asimdsame, OP3 (Vd, Vn, Vm), QL_V3FML4S, 0),
+
+ FP16_V8_2A_INSN ("fmlal", 0xf800000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML2S, 0),
+ FP16_V8_2A_INSN ("fmlsl", 0xf804000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML2S, 0),
+ FP16_V8_2A_INSN ("fmlal2", 0x2f808000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML2S, 0),
+ FP16_V8_2A_INSN ("fmlsl2", 0x2f80c000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML2S, 0),
+
+ FP16_V8_2A_INSN ("fmlal", 0x4f800000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML4S, 0),
+ FP16_V8_2A_INSN ("fmlsl", 0x4f804000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML4S, 0),
+ FP16_V8_2A_INSN ("fmlal2", 0x6f808000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML4S, 0),
+ FP16_V8_2A_INSN ("fmlsl2", 0x6f80c000, 0xffc0f400, asimdelem, OP3 (Vd, Vn, Em16), QL_V2FML4S, 0),
/* System extensions ARMv8.4-a. */
FLAGM_INSN ("rmif", 0xba000400, 0xffe07c10, ic_system, OP3 (Rn, IMM_2, MASK), QL_RMIF, 0),
FLAGM_INSN ("setf8", 0x3a00080d, 0xfffffc1f, ic_system, OP1 (Rn), QL_SETF, 0),
FLAGM_INSN ("setf16", 0x3a00480d, 0xfffffc1f, ic_system, OP1 (Rn), QL_SETF, 0),
/* Memory access instructions ARMv8.4-a. */
- V8_4_INSN ("stlurb" , 0x19000000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
- V8_4_INSN ("ldapurb", 0x19400000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
- V8_4_INSN ("ldapursb", 0x19c00000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
- V8_4_INSN ("ldapursb", 0x19800000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
- V8_4_INSN ("stlurh", 0x59000000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
- V8_4_INSN ("ldapurh", 0x59400000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
- V8_4_INSN ("ldapursh", 0x59c00000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
- V8_4_INSN ("ldapursh", 0x59800000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
- V8_4_INSN ("stlur", 0x99000000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
- V8_4_INSN ("ldapur", 0x99400000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
- V8_4_INSN ("ldapursw", 0x99800000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
- V8_4_INSN ("stlur", 0xd9000000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
- V8_4_INSN ("ldapur", 0xd9400000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
+ V8_4A_INSN ("stlurb" , 0x19000000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
+ V8_4A_INSN ("ldapurb", 0x19400000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
+ V8_4A_INSN ("ldapursb", 0x19c00000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
+ V8_4A_INSN ("ldapursb", 0x19800000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
+ V8_4A_INSN ("stlurh", 0x59000000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
+ V8_4A_INSN ("ldapurh", 0x59400000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
+ V8_4A_INSN ("ldapursh", 0x59c00000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
+ V8_4A_INSN ("ldapursh", 0x59800000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
+ V8_4A_INSN ("stlur", 0x99000000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
+ V8_4A_INSN ("ldapur", 0x99400000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLW, 0),
+ V8_4A_INSN ("ldapursw", 0x99800000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
+ V8_4A_INSN ("stlur", 0xd9000000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
+ V8_4A_INSN ("ldapur", 0xd9400000, 0xffe00c00, ldst_unscaled, OP2 (Rt, ADDR_OFFSET), QL_STLX, 0),
/* Matrix Multiply instructions. */
INT8MATMUL_SVE_INSNC ("smmla", 0x45009800, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_SBB, 0, C_SCAN_MOVPRFX, 0),