+2020-07-14  Jan Beulich  <jbeulich@suse.com>
+
+ * i386-dis.c (OP_VexR, VexScalarR): New.
+ (OP_EX_Vex, OP_XMM_Vex, EXdVexScalarS, EXqVexScalarS,
+ XMVexScalar, d_scalar_swap_mode, q_scalar_swap_mode,
+ need_vex_reg): Delete.
+ (prefix_table): Replace VexScalar by VexScalarR and
+ XMVexScalar by XMScalar for vmovss and vmovsd. Replace
+ EXdVexScalarS by EXdS and EXqVexScalarS by EXqS.
+ (vex_len_table): Replace EXqVexScalarS by EXqS.
+ (get_valid_dis386): Don't set need_vex_reg.
+ (print_insn): Don't initialize need_vex_reg.
+ (intel_operand_size, OP_E_memory): Drop d_scalar_swap_mode and
+ q_scalar_swap_mode cases.
+ (OP_EX): Don't check for d_scalar_swap_mode and
+ q_scalar_swap_mode.
+ (OP_VEX): Don't check need_vex_reg.
+ * i386-dis-evex-w.h: Replace VexScalar by VexScalarR and
+ XMVexScalar by XMScalar for vmovss and vmovsd. Replace
+ EXdVexScalarS by EXdS and EXqVexScalarS by EXqS.
+
2020-07-14  Jan Beulich  <jbeulich@suse.com>
* i386-dis.c (Vex128, Vex256, vex128_mode, vex256_mode): Delete.
/* EVEX_W_0F10_P_1 */
{
- { "vmovss", { XMVexScalar, VexScalar, EXxmm_md }, 0 },
+ { "vmovss", { XMScalar, VexScalarR, EXxmm_md }, 0 },
},
/* EVEX_W_0F10_P_3 */
{
{ Bad_Opcode },
- { "vmovsd", { XMVexScalar, VexScalar, EXxmm_mq }, 0 },
+ { "vmovsd", { XMScalar, VexScalarR, EXxmm_mq }, 0 },
},
/* EVEX_W_0F11_P_1 */
{
- { "vmovss", { EXdVexScalarS, VexScalar, XMScalar }, 0 },
+ { "vmovss", { EXdS, VexScalarR, XMScalar }, 0 },
},
/* EVEX_W_0F11_P_3 */
{
{ Bad_Opcode },
- { "vmovsd", { EXqVexScalarS, VexScalar, XMScalar }, 0 },
+ { "vmovsd", { EXqS, VexScalarR, XMScalar }, 0 },
},
/* EVEX_W_0F12_P_0_M_1 */
{
/* EVEX_W_0FD6_P_2 */
{
{ Bad_Opcode },
- { "vmovq", { EXqVexScalarS, XMScalar }, 0 },
+ { "vmovq", { EXqS, XMScalar }, 0 },
},
/* EVEX_W_0FE6_P_1 */
{
static void OP_XS (int, int);
static void OP_M (int, int);
static void OP_VEX (int, int);
+static void OP_VexR (int, int);
static void OP_VexW (int, int);
-static void OP_EX_Vex (int, int);
-static void OP_XMM_Vex (int, int);
static void OP_Rounding (int, int);
static void OP_REG_VexI4 (int, int);
static void OP_VexI4 (int, int);
#define Vex { OP_VEX, vex_mode }
#define VexW { OP_VexW, vex_mode }
#define VexScalar { OP_VEX, vex_scalar_mode }
+#define VexScalarR { OP_VexR, vex_scalar_mode }
#define VexGatherQ { OP_VEX, vex_vsib_q_w_dq_mode }
#define VexGdq { OP_VEX, dq_mode }
#define VexTmm { OP_VEX, tmm_mode }
-#define EXdVexScalarS { OP_EX_Vex, d_scalar_swap_mode }
-#define EXqVexScalarS { OP_EX_Vex, q_scalar_swap_mode }
-#define XMVexScalar { OP_XMM_Vex, scalar_mode }
#define XMVexI4 { OP_REG_VexI4, x_mode }
#define XMVexScalarI4 { OP_REG_VexI4, scalar_mode }
#define VexI4 { OP_VexI4, 0 }
/* scalar, ignore vector length. */
scalar_mode,
- /* like d_swap_mode, ignore vector length. */
- d_scalar_swap_mode,
- /* like q_swap_mode, ignore vector length. */
- q_scalar_swap_mode,
/* like vex_mode, ignore vector length. */
vex_scalar_mode,
/* Operand size depends on the VEX.W bit, ignore vector length. */
}
vex;
static unsigned char need_vex;
-static unsigned char need_vex_reg;
struct op
{
/* PREFIX_VEX_0F10 */
{
{ "vmovups", { XM, EXx }, 0 },
- { "vmovss", { XMVexScalar, VexScalar, EXxmm_md }, 0 },
+ { "vmovss", { XMScalar, VexScalarR, EXxmm_md }, 0 },
{ "vmovupd", { XM, EXx }, 0 },
- { "vmovsd", { XMVexScalar, VexScalar, EXxmm_mq }, 0 },
+ { "vmovsd", { XMScalar, VexScalarR, EXxmm_mq }, 0 },
},
/* PREFIX_VEX_0F11 */
{
{ "vmovups", { EXxS, XM }, 0 },
- { "vmovss", { EXdVexScalarS, VexScalar, XMScalar }, 0 },
+ { "vmovss", { EXdS, VexScalarR, XMScalar }, 0 },
{ "vmovupd", { EXxS, XM }, 0 },
- { "vmovsd", { EXqVexScalarS, VexScalar, XMScalar }, 0 },
+ { "vmovsd", { EXqS, VexScalarR, XMScalar }, 0 },
},
/* PREFIX_VEX_0F12 */
/* VEX_LEN_0FD6_P_2 */
{
- { "vmovq", { EXqVexScalarS, XMScalar }, 0 },
+ { "vmovq", { EXqS, XMScalar }, 0 },
},
/* VEX_LEN_0FF7_P_2 */
break;
}
need_vex = 1;
- need_vex_reg = 1;
codep++;
vindex = *codep++;
dp = &xop_table[vex_table_index][vindex];
break;
}
need_vex = 1;
- need_vex_reg = 1;
codep++;
vindex = *codep++;
dp = &vex_table[vex_table_index][vindex];
break;
}
need_vex = 1;
- need_vex_reg = 1;
codep++;
vindex = *codep++;
dp = &vex_table[dp->op[1].bytemode][vindex];
}
need_vex = 1;
- need_vex_reg = 1;
codep++;
vindex = *codep++;
dp = &evex_table[vex_table_index][vindex];
}
need_vex = 0;
- need_vex_reg = 0;
memset (&vex, 0, sizeof (vex));
if (dp->name == NULL && dp->op[0].bytemode == FLOATCODE)
used_prefixes |= (prefixes & PREFIX_DATA);
break;
case d_mode:
- case d_scalar_swap_mode:
case d_swap_mode:
case dqd_mode:
oappend ("DWORD PTR ");
break;
case q_mode:
- case q_scalar_swap_mode:
case q_swap_mode:
oappend ("QWORD PTR ");
break;
case xmm_md_mode:
case d_mode:
case d_swap_mode:
- case d_scalar_swap_mode:
shift = 2;
break;
}
case xmm_mq_mode:
case q_mode:
case q_swap_mode:
- case q_scalar_swap_mode:
shift = 3;
break;
case bw_unit_mode:
if ((sizeflag & SUFFIX_ALWAYS)
&& (bytemode == x_swap_mode
|| bytemode == d_swap_mode
- || bytemode == d_scalar_swap_mode
- || bytemode == q_swap_mode
- || bytemode == q_scalar_swap_mode))
+ || bytemode == q_swap_mode))
swap_operand ();
if (need_vex
&& bytemode != evex_half_bcst_xmmq_mode
&& bytemode != ymm_mode
&& bytemode != tmm_mode
- && bytemode != d_scalar_swap_mode
- && bytemode != q_scalar_swap_mode
&& bytemode != vex_scalar_w_dq_mode)
{
switch (vex.length)
if (!need_vex)
abort ();
- if (!need_vex_reg)
- return;
-
reg = vex.register_specifier;
vex.register_specifier = 0;
if (address_mode != mode_64bit)
oappend (names[reg]);
}
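+/* Like OP_VEX, but print the VEX.vvvv register operand only for the
+   register form (ModRM.mod == 3); the memory forms of the affected
+   insns have no vvvv operand.  */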
+static void
+OP_VexR (int bytemode, int sizeflag)
+{
+ if (modrm.mod == 3)
+ OP_VEX (bytemode, sizeflag);
+}
+
static void
OP_VexW (int bytemode, int sizeflag)
{
oappend_maybe_intel (scratchbuf);
}
-static void
-OP_EX_Vex (int bytemode, int sizeflag)
-{
- if (modrm.mod != 3)
- need_vex_reg = 0;
- OP_EX (bytemode, sizeflag);
-}
-
-static void
-OP_XMM_Vex (int bytemode, int sizeflag)
-{
- if (modrm.mod != 3)
- need_vex_reg = 0;
- OP_XMM (bytemode, sizeflag);
-}
-
static void
VPCMP_Fixup (int bytemode ATTRIBUTE_UNUSED,
int sizeflag ATTRIBUTE_UNUSED)