+2020-06-25 Jan Beulich <jbeulich@suse.com>
+
+ * config/tc-i386.c (process_suffix): Skip ambiguous operand size
+ diagnostic when there is a sizing prefix. Switch to word/dword/
+ qword encoding when there is a sizing prefix and no (explicit or
+ derived) suffix.
+ (update_imm): Handle presence of a sizing prefix.
+ * testsuite/gas/i386/noreg16-data32.d,
+ testsuite/gas/i386/noreg32-data16.d,
+ testsuite/gas/i386/noreg32-data16.e,
+ testsuite/gas/i386/noreg64-data16.d,
+ testsuite/gas/i386/noreg64-data16.e,
+ testsuite/gas/i386/noreg64-rex64.d: New.
+ * testsuite/gas/i386/i386.exp: Run new tests.
+ * testsuite/gas/i386/noreg32.s, testsuite/gas/i386/noreg64.s:
+ Introduce and use pfx* macros.
+ * testsuite/gas/i386/noreg16.s: Likewise. Replace 32-bit
+ addressing by 16-bit forms.
+ * testsuite/gas/i386/noreg16.d: Adjust expectations.
+
2020-06-25 Jan Beulich <jbeulich@suse.com>
* testsuite/gas/i386/avx-16bit.d,
&& !i.tm.opcode_modifier.no_lsuf
&& !i.tm.opcode_modifier.no_qsuf))
&& i.tm.opcode_modifier.mnemonicsize != IGNORESIZE
+ /* Explicit sizing prefixes are assumed to disambiguate insns. */
+ && !i.prefix[DATA_PREFIX] && !(i.prefix[REX_PREFIX] & REX_W)
/* Accept FLDENV et al without suffix. */
&& (i.tm.opcode_modifier.no_ssuf || i.tm.opcode_modifier.floatmf))
{
i.rex |= REX_W;
break;
+
+ case 0:
+ /* Select word/dword/qword operation with explicit data sizing prefix
+ when there are no suitable register operands. */
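+ /* E.g. "data16 not (%eax)" in 32-bit mode is then encoded as NOTW
+ instead of being diagnosed as ambiguous. */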
+ if (i.tm.opcode_modifier.w
+ && (i.prefix[DATA_PREFIX] || (i.prefix[REX_PREFIX] & REX_W))
+ && (!i.reg_operands
+ || (i.reg_operands == 1
+ /* ShiftCount */
+ && (i.tm.operand_types[0].bitfield.instance == RegC
+ /* InOutPortReg */
+ || i.tm.operand_types[0].bitfield.instance == RegD
+ || i.tm.operand_types[1].bitfield.instance == RegD
+ /* CRC32 */
+ || i.tm.base_opcode == 0xf20f38f0))))
+ i.tm.base_opcode |= 1;
+ break;
}
if (i.tm.opcode_modifier.addrprefixopreg)
else
overlap = imm32s;
}
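+ /* An explicit REX.W selects a 64-bit operand and hence a sign-extended
+ 32-bit immediate, while a data prefix selects a 16-bit immediate
+ outside of 16-bit mode and a 32-bit one inside of it. */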
+ else if (i.prefix[REX_PREFIX] & REX_W)
+ overlap = operand_type_and (overlap, imm32s);
+ else if (i.prefix[DATA_PREFIX])
+ overlap = operand_type_and (overlap,
+ flag_code != CODE_16BIT ? imm16 : imm32);
if (!operand_type_equal (&overlap, &imm8)
&& !operand_type_equal (&overlap, &imm8s)
&& !operand_type_equal (&overlap, &imm16)
run_dump_test "nops-7"
run_dump_test "noreg16"
run_list_test "noreg16"
+ run_dump_test "noreg16-data32"
run_dump_test "noreg32"
run_list_test "noreg32"
+ run_dump_test "noreg32-data16"
run_list_test "movx16" "-I${srcdir}/$subdir -al"
run_list_test "movx32" "-al"
run_dump_test "addr16"
run_list_test "x86-64-sysenter-amd" "-mamd64"
run_dump_test "noreg64"
run_list_test "noreg64"
+ run_dump_test "noreg64-data16"
+ run_dump_test "noreg64-rex64"
run_dump_test "noreg-intel64"
run_list_test "noreg-intel64" "-I${srcdir}/$subdir -mintel64"
run_list_test "movx64" "-al"
--- /dev/null
+#as: --defsym DATA32=1
+#objdump: -dwMi8086
+#name: 16-bit insns not sizeable through register operands w/ data32
+#source: noreg16.s
+
+.*: +file format .*
+
+Disassembly of section .text:
+
+0+ <noreg>:
+ *[a-f0-9]+: 66 83 17 01 adcl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 81 17 89 00 00 00 adcl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 81 17 34 12 00 00 adcl \$0x1234,\(%bx\)
+ *[a-f0-9]+: 66 83 07 01 addl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 81 07 89 00 00 00 addl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 81 07 34 12 00 00 addl \$0x1234,\(%bx\)
+ *[a-f0-9]+: 66 83 27 01 andl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 81 27 89 00 00 00 andl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 81 27 34 12 00 00 andl \$0x1234,\(%bx\)
+ *[a-f0-9]+: 66 0f ba 27 01 btl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 0f ba 3f 01 btcl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 0f ba 37 01 btrl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 0f ba 2f 01 btsl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 ff 17 calll \*\(%bx\)
+ *[a-f0-9]+: 66 83 3f 01 cmpl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 81 3f 89 00 00 00 cmpl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 81 3f 34 12 00 00 cmpl \$0x1234,\(%bx\)
+ *[a-f0-9]+: 66 a7 cmpsl %es:\(%di\),%ds:\(%si\)
+ *[a-f0-9]+: 66 a7 cmpsl %es:\(%di\),%ds:\(%si\)
+ *[a-f0-9]+: 66 f2 0f 38 f1 07 crc32l \(%bx\),%eax
+ *[a-f0-9]+: f2 0f 2a 07 cvtsi2sd \(%bx\),%xmm0
+ *[a-f0-9]+: f3 0f 2a 07 cvtsi2ss \(%bx\),%xmm0
+ *[a-f0-9]+: 66 ff 0f decl \(%bx\)
+ *[a-f0-9]+: 66 f7 37 divl \(%bx\)
+ *[a-f0-9]+: 66 d8 07 data32 fadds \(%bx\)
+ *[a-f0-9]+: 66 d8 17 data32 fcoms \(%bx\)
+ *[a-f0-9]+: 66 d8 1f data32 fcomps \(%bx\)
+ *[a-f0-9]+: 66 d8 37 data32 fdivs \(%bx\)
+ *[a-f0-9]+: 66 d8 3f data32 fdivrs \(%bx\)
+ *[a-f0-9]+: 66 de 07 data32 fiadds \(%bx\)
+ *[a-f0-9]+: 66 de 17 data32 ficoms \(%bx\)
+ *[a-f0-9]+: 66 de 1f data32 ficomps \(%bx\)
+ *[a-f0-9]+: 66 de 37 data32 fidivs \(%bx\)
+ *[a-f0-9]+: 66 de 3f data32 fidivrs \(%bx\)
+ *[a-f0-9]+: 66 df 07 data32 filds \(%bx\)
+ *[a-f0-9]+: 66 de 0f data32 fimuls \(%bx\)
+ *[a-f0-9]+: 66 df 17 data32 fists \(%bx\)
+ *[a-f0-9]+: 66 df 1f data32 fistps \(%bx\)
+ *[a-f0-9]+: 66 df 0f data32 fisttps \(%bx\)
+ *[a-f0-9]+: 66 de 27 data32 fisubs \(%bx\)
+ *[a-f0-9]+: 66 de 2f data32 fisubrs \(%bx\)
+ *[a-f0-9]+: 66 d9 07 data32 flds \(%bx\)
+ *[a-f0-9]+: 66 d8 0f data32 fmuls \(%bx\)
+ *[a-f0-9]+: 66 d9 17 data32 fsts \(%bx\)
+ *[a-f0-9]+: 66 d9 1f data32 fstps \(%bx\)
+ *[a-f0-9]+: 66 d8 27 data32 fsubs \(%bx\)
+ *[a-f0-9]+: 66 d8 2f data32 fsubrs \(%bx\)
+ *[a-f0-9]+: 66 f7 3f idivl \(%bx\)
+ *[a-f0-9]+: 66 f7 2f imull \(%bx\)
+ *[a-f0-9]+: 66 e5 00 in \$0x0,%eax
+ *[a-f0-9]+: 66 ed in \(%dx\),%eax
+ *[a-f0-9]+: 66 ff 07 incl \(%bx\)
+ *[a-f0-9]+: 66 6d insl \(%dx\),%es:\(%di\)
+ *[a-f0-9]+: 66 6d insl \(%dx\),%es:\(%di\)
+ *[a-f0-9]+: 66 ff 27 jmpl \*\(%bx\)
+ *[a-f0-9]+: 66 0f 01 17 lgdtl \(%bx\)
+ *[a-f0-9]+: 66 0f 01 1f lidtl \(%bx\)
+ *[a-f0-9]+: 66 0f 00 17 data32 lldt \(%bx\)
+ *[a-f0-9]+: 66 0f 01 37 data32 lmsw \(%bx\)
+ *[a-f0-9]+: 66 ad lods %ds:\(%si\),%eax
+ *[a-f0-9]+: 66 ad lods %ds:\(%si\),%eax
+ *[a-f0-9]+: 66 0f 00 1f data32 ltr \(%bx\)
+ *[a-f0-9]+: 66 c7 07 12 00 00 00 movl \$0x12,\(%bx\)
+ *[a-f0-9]+: 66 c7 07 34 12 00 00 movl \$0x1234,\(%bx\)
+ *[a-f0-9]+: 66 8c 07 data32 mov %es,\(%bx\)
+ *[a-f0-9]+: 66 8e 07 data32 mov \(%bx\),%es
+ *[a-f0-9]+: 66 a5 movsl %ds:\(%si\),%es:\(%di\)
+ *[a-f0-9]+: 66 a5 movsl %ds:\(%si\),%es:\(%di\)
+ *[a-f0-9]+: 66 0f be 07 movsbl \(%bx\),%eax
+ *[a-f0-9]+: 66 0f be 07 movsbl \(%bx\),%eax
+ *[a-f0-9]+: 66 0f b6 07 movzbl \(%bx\),%eax
+ *[a-f0-9]+: 66 0f b6 07 movzbl \(%bx\),%eax
+ *[a-f0-9]+: 66 f7 27 mull \(%bx\)
+ *[a-f0-9]+: 66 f7 1f negl \(%bx\)
+ *[a-f0-9]+: 66 0f 1f 07 nopl \(%bx\)
+ *[a-f0-9]+: 66 f7 17 notl \(%bx\)
+ *[a-f0-9]+: 66 83 0f 01 orl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 81 0f 89 00 00 00 orl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 81 0f 34 12 00 00 orl \$0x1234,\(%bx\)
+ *[a-f0-9]+: 66 e7 00 out %eax,\$0x0
+ *[a-f0-9]+: 66 ef out %eax,\(%dx\)
+ *[a-f0-9]+: 66 6f outsl %ds:\(%si\),\(%dx\)
+ *[a-f0-9]+: 66 6f outsl %ds:\(%si\),\(%dx\)
+ *[a-f0-9]+: 66 8f 07 popl \(%bx\)
+ *[a-f0-9]+: 66 07 popl %es
+ *[a-f0-9]+: f3 0f ae 27 ptwrite \(%bx\)
+ *[a-f0-9]+: 66 ff 37 pushl \(%bx\)
+ *[a-f0-9]+: 66 06 pushl %es
+ *[a-f0-9]+: 66 d1 17 rcll \(%bx\)
+ *[a-f0-9]+: 66 c1 17 02 rcll \$0x2,\(%bx\)
+ *[a-f0-9]+: 66 d3 17 rcll %cl,\(%bx\)
+ *[a-f0-9]+: 66 d1 17 rcll \(%bx\)
+ *[a-f0-9]+: 66 d1 1f rcrl \(%bx\)
+ *[a-f0-9]+: 66 c1 1f 02 rcrl \$0x2,\(%bx\)
+ *[a-f0-9]+: 66 d3 1f rcrl %cl,\(%bx\)
+ *[a-f0-9]+: 66 d1 1f rcrl \(%bx\)
+ *[a-f0-9]+: 66 d1 07 roll \(%bx\)
+ *[a-f0-9]+: 66 c1 07 02 roll \$0x2,\(%bx\)
+ *[a-f0-9]+: 66 d3 07 roll %cl,\(%bx\)
+ *[a-f0-9]+: 66 d1 07 roll \(%bx\)
+ *[a-f0-9]+: 66 d1 0f rorl \(%bx\)
+ *[a-f0-9]+: 66 c1 0f 02 rorl \$0x2,\(%bx\)
+ *[a-f0-9]+: 66 d3 0f rorl %cl,\(%bx\)
+ *[a-f0-9]+: 66 d1 0f rorl \(%bx\)
+ *[a-f0-9]+: 66 83 1f 01 sbbl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 81 1f 89 00 00 00 sbbl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 81 1f 34 12 00 00 sbbl \$0x1234,\(%bx\)
+ *[a-f0-9]+: 66 af scas %es:\(%di\),%eax
+ *[a-f0-9]+: 66 af scas %es:\(%di\),%eax
+ *[a-f0-9]+: 66 d1 27 shll \(%bx\)
+ *[a-f0-9]+: 66 c1 27 02 shll \$0x2,\(%bx\)
+ *[a-f0-9]+: 66 d3 27 shll %cl,\(%bx\)
+ *[a-f0-9]+: 66 d1 27 shll \(%bx\)
+ *[a-f0-9]+: 66 d1 3f sarl \(%bx\)
+ *[a-f0-9]+: 66 c1 3f 02 sarl \$0x2,\(%bx\)
+ *[a-f0-9]+: 66 d3 3f sarl %cl,\(%bx\)
+ *[a-f0-9]+: 66 d1 3f sarl \(%bx\)
+ *[a-f0-9]+: 66 d1 27 shll \(%bx\)
+ *[a-f0-9]+: 66 c1 27 02 shll \$0x2,\(%bx\)
+ *[a-f0-9]+: 66 d3 27 shll %cl,\(%bx\)
+ *[a-f0-9]+: 66 d1 27 shll \(%bx\)
+ *[a-f0-9]+: 66 d1 2f shrl \(%bx\)
+ *[a-f0-9]+: 66 c1 2f 02 shrl \$0x2,\(%bx\)
+ *[a-f0-9]+: 66 d3 2f shrl %cl,\(%bx\)
+ *[a-f0-9]+: 66 d1 2f shrl \(%bx\)
+ *[a-f0-9]+: 66 ab stos %eax,%es:\(%di\)
+ *[a-f0-9]+: 66 ab stos %eax,%es:\(%di\)
+ *[a-f0-9]+: 66 83 2f 01 subl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 81 2f 89 00 00 00 subl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 81 2f 34 12 00 00 subl \$0x1234,\(%bx\)
+ *[a-f0-9]+: 66 f7 07 89 00 00 00 testl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 f7 07 34 12 00 00 testl \$0x1234,\(%bx\)
+ *[a-f0-9]+: c5 fb 2a 07 vcvtsi2sd \(%bx\),%xmm0,%xmm0
+ *[a-f0-9]+: 62 f1 7f 08 2a 07 vcvtsi2sd \(%bx\),%xmm0,%xmm0
+ *[a-f0-9]+: c5 fa 2a 07 vcvtsi2ss \(%bx\),%xmm0,%xmm0
+ *[a-f0-9]+: 62 f1 7e 08 2a 07 vcvtsi2ss \(%bx\),%xmm0,%xmm0
+ *[a-f0-9]+: 62 f1 7f 08 7b 07 vcvtusi2sd \(%bx\),%xmm0,%xmm0
+ *[a-f0-9]+: 62 f1 7e 08 7b 07 vcvtusi2ss \(%bx\),%xmm0,%xmm0
+ *[a-f0-9]+: 66 83 37 01 xorl \$0x1,\(%bx\)
+ *[a-f0-9]+: 66 81 37 89 00 00 00 xorl \$0x89,\(%bx\)
+ *[a-f0-9]+: 66 81 37 34 12 00 00 xorl \$0x1234,\(%bx\)
+#pass
*[a-f0-9]+: 81 3f 89 00 cmpw \$0x89,\(%bx\)
*[a-f0-9]+: 81 3f 34 12 cmpw \$0x1234,\(%bx\)
*[a-f0-9]+: a7 cmpsw %es:\(%di\),%ds:\(%si\)
- *[a-f0-9]+: 67 a7 cmpsw %es:\(%edi\),%ds:\(%esi\)
+ *[a-f0-9]+: a7 cmpsw %es:\(%di\),%ds:\(%si\)
*[a-f0-9]+: f2 0f 38 f1 07 crc32w \(%bx\),%eax
*[a-f0-9]+: f2 0f 2a 07 cvtsi2sd \(%bx\),%xmm0
*[a-f0-9]+: f3 0f 2a 07 cvtsi2ss \(%bx\),%xmm0
*[a-f0-9]+: ed in \(%dx\),%ax
*[a-f0-9]+: ff 07 incw \(%bx\)
*[a-f0-9]+: 6d insw \(%dx\),%es:\(%di\)
- *[a-f0-9]+: 67 6d insw \(%dx\),%es:\(%edi\)
+ *[a-f0-9]+: 6d insw \(%dx\),%es:\(%di\)
*[a-f0-9]+: ff 27 jmp \*\(%bx\)
*[a-f0-9]+: 0f 01 17 lgdtw \(%bx\)
*[a-f0-9]+: 0f 01 1f lidtw \(%bx\)
*[a-f0-9]+: 0f 00 17 lldt \(%bx\)
*[a-f0-9]+: 0f 01 37 lmsw \(%bx\)
*[a-f0-9]+: ad lods %ds:\(%si\),%ax
- *[a-f0-9]+: 67 ad lods %ds:\(%esi\),%ax
+ *[a-f0-9]+: ad lods %ds:\(%si\),%ax
*[a-f0-9]+: 0f 00 1f ltr \(%bx\)
*[a-f0-9]+: c7 07 12 00 movw \$0x12,\(%bx\)
*[a-f0-9]+: c7 07 34 12 movw \$0x1234,\(%bx\)
*[a-f0-9]+: 8c 07 mov %es,\(%bx\)
*[a-f0-9]+: 8e 07 mov \(%bx\),%es
*[a-f0-9]+: a5 movsw %ds:\(%si\),%es:\(%di\)
- *[a-f0-9]+: 67 a5 movsw %ds:\(%esi\),%es:\(%edi\)
+ *[a-f0-9]+: a5 movsw %ds:\(%si\),%es:\(%di\)
*[a-f0-9]+: 0f be 07 movsbw \(%bx\),%ax
*[a-f0-9]+: 66 0f be 07 movsbl \(%bx\),%eax
*[a-f0-9]+: 0f b6 07 movzbw \(%bx\),%ax
*[a-f0-9]+: e7 00 out %ax,\$0x0
*[a-f0-9]+: ef out %ax,\(%dx\)
*[a-f0-9]+: 6f outsw %ds:\(%si\),\(%dx\)
- *[a-f0-9]+: 67 6f outsw %ds:\(%esi\),\(%dx\)
+ *[a-f0-9]+: 6f outsw %ds:\(%si\),\(%dx\)
*[a-f0-9]+: 8f 07 popw \(%bx\)
*[a-f0-9]+: 07 pop %es
*[a-f0-9]+: f3 0f ae 27 ptwrite \(%bx\)
*[a-f0-9]+: 81 1f 89 00 sbbw \$0x89,\(%bx\)
*[a-f0-9]+: 81 1f 34 12 sbbw \$0x1234,\(%bx\)
*[a-f0-9]+: af scas %es:\(%di\),%ax
- *[a-f0-9]+: 67 af scas %es:\(%edi\),%ax
+ *[a-f0-9]+: af scas %es:\(%di\),%ax
*[a-f0-9]+: d1 27 shlw \(%bx\)
*[a-f0-9]+: c1 27 02 shlw \$0x2,\(%bx\)
*[a-f0-9]+: d3 27 shlw %cl,\(%bx\)
*[a-f0-9]+: d3 2f shrw %cl,\(%bx\)
*[a-f0-9]+: d1 2f shrw \(%bx\)
*[a-f0-9]+: ab stos %ax,%es:\(%di\)
- *[a-f0-9]+: 67 ab stos %ax,%es:\(%edi\)
+ *[a-f0-9]+: ab stos %ax,%es:\(%di\)
*[a-f0-9]+: 83 2f 01 subw \$0x1,\(%bx\)
*[a-f0-9]+: 81 2f 89 00 subw \$0x89,\(%bx\)
*[a-f0-9]+: 81 2f 34 12 subw \$0x1234,\(%bx\)
+ .macro pfx insn:vararg
+ .ifdef DATA32
+ data32 \insn
+ .else
+ \insn
+ .endif
+ .endm
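+ # DATA32 is defined via "--defsym DATA32=1" when running the data32
+ # variant of this test; plain runs leave \insn unprefixed.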
+
.text
.code16
noreg:
- adc $1, (%bx)
- adc $0x89, (%bx)
- adc $0x1234, (%bx)
- add $1, (%bx)
- add $0x89, (%bx)
- add $0x1234, (%bx)
- and $1, (%bx)
- and $0x89, (%bx)
- and $0x1234, (%bx)
- bt $1, (%bx)
- btc $1, (%bx)
- btr $1, (%bx)
- bts $1, (%bx)
- call *(%bx)
- cmp $1, (%bx)
- cmp $0x89, (%bx)
- cmp $0x1234, (%bx)
- cmps
- cmps %es:(%edi), (%esi)
- crc32 (%bx), %eax
- cvtsi2sd (%bx), %xmm0
- cvtsi2ss (%bx), %xmm0
- dec (%bx)
- div (%bx)
- fadd (%bx)
- fcom (%bx)
- fcomp (%bx)
- fdiv (%bx)
- fdivr (%bx)
- fiadd (%bx)
- ficom (%bx)
- ficomp (%bx)
- fidiv (%bx)
- fidivr (%bx)
- fild (%bx)
- fimul (%bx)
- fist (%bx)
- fistp (%bx)
- fisttp (%bx)
- fisub (%bx)
- fisubr (%bx)
- fld (%bx)
- fmul (%bx)
- fst (%bx)
- fstp (%bx)
- fsub (%bx)
- fsubr (%bx)
- idiv (%bx)
- imul (%bx)
- in $0
- in %dx
- inc (%bx)
- ins
- ins %dx, %es:(%edi)
- jmp *(%bx)
- lgdt (%bx)
- lidt (%bx)
- lldt (%bx)
- lmsw (%bx)
- lods
- lods (%esi)
- ltr (%bx)
- mov $0x12, (%bx)
- mov $0x1234, (%bx)
- mov %es, (%bx)
- mov (%bx), %es
- movs
- movs (%esi), %es:(%edi)
- movsx (%bx), %ax
- movsx (%bx), %eax
- movzx (%bx), %ax
- movzx (%bx), %eax
- mul (%bx)
- neg (%bx)
- nop (%bx)
- not (%bx)
- or $1, (%bx)
- or $0x89, (%bx)
- or $0x1234, (%bx)
- out $0
- out %dx
- outs
- outs (%esi), %dx
- pop (%bx)
- pop %es
- ptwrite (%bx)
- push (%bx)
- push %es
- rcl $1, (%bx)
- rcl $2, (%bx)
- rcl %cl, (%bx)
- rcl (%bx)
- rcr $1, (%bx)
- rcr $2, (%bx)
- rcr %cl, (%bx)
- rcr (%bx)
- rol $1, (%bx)
- rol $2, (%bx)
- rol %cl, (%bx)
- rol (%bx)
- ror $1, (%bx)
- ror $2, (%bx)
- ror %cl, (%bx)
- ror (%bx)
- sbb $1, (%bx)
- sbb $0x89, (%bx)
- sbb $0x1234, (%bx)
- scas
- scas %es:(%edi)
- sal $1, (%bx)
- sal $2, (%bx)
- sal %cl, (%bx)
- sal (%bx)
- sar $1, (%bx)
- sar $2, (%bx)
- sar %cl, (%bx)
- sar (%bx)
- shl $1, (%bx)
- shl $2, (%bx)
- shl %cl, (%bx)
- shl (%bx)
- shr $1, (%bx)
- shr $2, (%bx)
- shr %cl, (%bx)
- shr (%bx)
- stos
- stos %es:(%edi)
- sub $1, (%bx)
- sub $0x89, (%bx)
- sub $0x1234, (%bx)
- test $0x89, (%bx)
- test $0x1234, (%bx)
- vcvtsi2sd (%bx), %xmm0, %xmm0
+ pfx adc $1, (%bx)
+ pfx adc $0x89, (%bx)
+ pfx adc $0x1234, (%bx)
+ pfx add $1, (%bx)
+ pfx add $0x89, (%bx)
+ pfx add $0x1234, (%bx)
+ pfx and $1, (%bx)
+ pfx and $0x89, (%bx)
+ pfx and $0x1234, (%bx)
+ pfx bt $1, (%bx)
+ pfx btc $1, (%bx)
+ pfx btr $1, (%bx)
+ pfx bts $1, (%bx)
+ pfx call *(%bx)
+ pfx cmp $1, (%bx)
+ pfx cmp $0x89, (%bx)
+ pfx cmp $0x1234, (%bx)
+ pfx cmps
+ pfx cmps %es:(%di), (%si)
+ pfx crc32 (%bx), %eax
+ cvtsi2sd (%bx), %xmm0
+ cvtsi2ss (%bx), %xmm0
+ pfx dec (%bx)
+ pfx div (%bx)
+ pfx fadd (%bx)
+ pfx fcom (%bx)
+ pfx fcomp (%bx)
+ pfx fdiv (%bx)
+ pfx fdivr (%bx)
+ pfx fiadd (%bx)
+ pfx ficom (%bx)
+ pfx ficomp (%bx)
+ pfx fidiv (%bx)
+ pfx fidivr (%bx)
+ pfx fild (%bx)
+ pfx fimul (%bx)
+ pfx fist (%bx)
+ pfx fistp (%bx)
+ pfx fisttp (%bx)
+ pfx fisub (%bx)
+ pfx fisubr (%bx)
+ pfx fld (%bx)
+ pfx fmul (%bx)
+ pfx fst (%bx)
+ pfx fstp (%bx)
+ pfx fsub (%bx)
+ pfx fsubr (%bx)
+ pfx idiv (%bx)
+ pfx imul (%bx)
+ pfx in $0
+ pfx in %dx
+ pfx inc (%bx)
+ pfx ins
+ pfx ins %dx, %es:(%di)
+ pfx jmp *(%bx)
+ pfx lgdt (%bx)
+ pfx lidt (%bx)
+ pfx lldt (%bx)
+ pfx lmsw (%bx)
+ pfx lods
+ pfx lods (%si)
+ pfx ltr (%bx)
+ pfx mov $0x12, (%bx)
+ pfx mov $0x1234, (%bx)
+ pfx mov %es, (%bx)
+ pfx mov (%bx), %es
+ pfx movs
+ pfx movs (%si), %es:(%di)
+ pfx movsx (%bx), %ax
+ movsx (%bx), %eax
+ pfx movzx (%bx), %ax
+ movzx (%bx), %eax
+ pfx mul (%bx)
+ pfx neg (%bx)
+ pfx nop (%bx)
+ pfx not (%bx)
+ pfx or $1, (%bx)
+ pfx or $0x89, (%bx)
+ pfx or $0x1234, (%bx)
+ pfx out $0
+ pfx out %dx
+ pfx outs
+ pfx outs (%si), %dx
+ pfx pop (%bx)
+ pfx pop %es
+ ptwrite (%bx)
+ pfx push (%bx)
+ pfx push %es
+ pfx rcl $1, (%bx)
+ pfx rcl $2, (%bx)
+ pfx rcl %cl, (%bx)
+ pfx rcl (%bx)
+ pfx rcr $1, (%bx)
+ pfx rcr $2, (%bx)
+ pfx rcr %cl, (%bx)
+ pfx rcr (%bx)
+ pfx rol $1, (%bx)
+ pfx rol $2, (%bx)
+ pfx rol %cl, (%bx)
+ pfx rol (%bx)
+ pfx ror $1, (%bx)
+ pfx ror $2, (%bx)
+ pfx ror %cl, (%bx)
+ pfx ror (%bx)
+ pfx sbb $1, (%bx)
+ pfx sbb $0x89, (%bx)
+ pfx sbb $0x1234, (%bx)
+ pfx scas
+ pfx scas %es:(%di)
+ pfx sal $1, (%bx)
+ pfx sal $2, (%bx)
+ pfx sal %cl, (%bx)
+ pfx sal (%bx)
+ pfx sar $1, (%bx)
+ pfx sar $2, (%bx)
+ pfx sar %cl, (%bx)
+ pfx sar (%bx)
+ pfx shl $1, (%bx)
+ pfx shl $2, (%bx)
+ pfx shl %cl, (%bx)
+ pfx shl (%bx)
+ pfx shr $1, (%bx)
+ pfx shr $2, (%bx)
+ pfx shr %cl, (%bx)
+ pfx shr (%bx)
+ pfx stos
+ pfx stos %es:(%di)
+ pfx sub $1, (%bx)
+ pfx sub $0x89, (%bx)
+ pfx sub $0x1234, (%bx)
+ pfx test $0x89, (%bx)
+ pfx test $0x1234, (%bx)
+ vcvtsi2sd (%bx), %xmm0, %xmm0
{evex} vcvtsi2sd (%bx), %xmm0, %xmm0
- vcvtsi2ss (%bx), %xmm0, %xmm0
+ vcvtsi2ss (%bx), %xmm0, %xmm0
{evex} vcvtsi2ss (%bx), %xmm0, %xmm0
- vcvtusi2sd (%bx), %xmm0, %xmm0
- vcvtusi2ss (%bx), %xmm0, %xmm0
- xor $1, (%bx)
- xor $0x89, (%bx)
- xor $0x1234, (%bx)
+ vcvtusi2sd (%bx), %xmm0, %xmm0
+ vcvtusi2ss (%bx), %xmm0, %xmm0
+ pfx xor $1, (%bx)
+ pfx xor $0x89, (%bx)
+ pfx xor $0x1234, (%bx)
--- /dev/null
+#as: --defsym DATA16=1
+#objdump: -dw
+#name: 32-bit insns not sizeable through register operands w/ data16
+#source: noreg32.s
+#warning_output: noreg32-data16.e
+
+.*: +file format .*
+
+Disassembly of section .text:
+
+0+ <noreg>:
+ *[a-f0-9]+: 66 83 10 01 adcw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 81 10 89 00 adcw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 81 10 34 12 adcw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 81 10 78 56 adcw \$0x5678,\(%eax\)
+ *[a-f0-9]+: 66 83 00 01 addw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 81 00 89 00 addw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 81 00 34 12 addw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 81 00 78 56 addw \$0x5678,\(%eax\)
+ *[a-f0-9]+: 66 83 20 01 andw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 81 20 89 00 andw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 81 20 34 12 andw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 81 20 78 56 andw \$0x5678,\(%eax\)
+ *[a-f0-9]+: 66 0f ba 20 01 btw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 0f ba 38 01 btcw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 0f ba 30 01 btrw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 0f ba 28 01 btsw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 ff 10 callw \*\(%eax\)
+ *[a-f0-9]+: 66 83 38 01 cmpw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 81 38 89 00 cmpw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 81 38 34 12 cmpw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 81 38 78 56 cmpw \$0x5678,\(%eax\)
+ *[a-f0-9]+: 66 a7 cmpsw %es:\(%edi\),%ds:\(%esi\)
+ *[a-f0-9]+: 66 a7 cmpsw %es:\(%edi\),%ds:\(%esi\)
+ *[a-f0-9]+: 66 f2 0f 38 f1 00 crc32w \(%eax\),%eax
+ *[a-f0-9]+: f2 0f 2a 00 cvtsi2sd \(%eax\),%xmm0
+ *[a-f0-9]+: f3 0f 2a 00 cvtsi2ss \(%eax\),%xmm0
+ *[a-f0-9]+: 66 ff 08 decw \(%eax\)
+ *[a-f0-9]+: 66 f7 30 divw \(%eax\)
+ *[a-f0-9]+: 66 d8 00 data16 fadds \(%eax\)
+ *[a-f0-9]+: 66 d8 10 data16 fcoms \(%eax\)
+ *[a-f0-9]+: 66 d8 18 data16 fcomps \(%eax\)
+ *[a-f0-9]+: 66 d8 30 data16 fdivs \(%eax\)
+ *[a-f0-9]+: 66 d8 38 data16 fdivrs \(%eax\)
+ *[a-f0-9]+: 66 de 00 data16 fiadds \(%eax\)
+ *[a-f0-9]+: 66 de 10 data16 ficoms \(%eax\)
+ *[a-f0-9]+: 66 de 18 data16 ficomps \(%eax\)
+ *[a-f0-9]+: 66 de 30 data16 fidivs \(%eax\)
+ *[a-f0-9]+: 66 de 38 data16 fidivrs \(%eax\)
+ *[a-f0-9]+: 66 df 00 data16 filds \(%eax\)
+ *[a-f0-9]+: 66 de 08 data16 fimuls \(%eax\)
+ *[a-f0-9]+: 66 df 10 data16 fists \(%eax\)
+ *[a-f0-9]+: 66 df 18 data16 fistps \(%eax\)
+ *[a-f0-9]+: 66 df 08 data16 fisttps \(%eax\)
+ *[a-f0-9]+: 66 de 20 data16 fisubs \(%eax\)
+ *[a-f0-9]+: 66 de 28 data16 fisubrs \(%eax\)
+ *[a-f0-9]+: 66 d9 00 data16 flds \(%eax\)
+ *[a-f0-9]+: 66 d8 08 data16 fmuls \(%eax\)
+ *[a-f0-9]+: 66 d9 10 data16 fsts \(%eax\)
+ *[a-f0-9]+: 66 d9 18 data16 fstps \(%eax\)
+ *[a-f0-9]+: 66 d8 20 data16 fsubs \(%eax\)
+ *[a-f0-9]+: 66 d8 28 data16 fsubrs \(%eax\)
+ *[a-f0-9]+: 66 f7 38 idivw \(%eax\)
+ *[a-f0-9]+: 66 f7 28 imulw \(%eax\)
+ *[a-f0-9]+: 66 e5 00 in \$0x0,%ax
+ *[a-f0-9]+: 66 ed in \(%dx\),%ax
+ *[a-f0-9]+: 66 ff 00 incw \(%eax\)
+ *[a-f0-9]+: 66 6d insw \(%dx\),%es:\(%edi\)
+ *[a-f0-9]+: 66 6d insw \(%dx\),%es:\(%edi\)
+ *[a-f0-9]+: 66 ff 20 jmpw \*\(%eax\)
+ *[a-f0-9]+: 66 0f 01 10 lgdtw \(%eax\)
+ *[a-f0-9]+: 66 0f 01 18 lidtw \(%eax\)
+ *[a-f0-9]+: 66 0f 00 10 data16 lldt \(%eax\)
+ *[a-f0-9]+: 66 0f 01 30 data16 lmsw \(%eax\)
+ *[a-f0-9]+: 66 ad lods %ds:\(%esi\),%ax
+ *[a-f0-9]+: 66 ad lods %ds:\(%esi\),%ax
+ *[a-f0-9]+: 66 0f 00 18 data16 ltr \(%eax\)
+ *[a-f0-9]+: 66 c7 00 12 00 movw \$0x12,\(%eax\)
+ *[a-f0-9]+: 66 c7 00 34 12 movw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 c7 00 78 56 movw \$0x5678,\(%eax\)
+ *[a-f0-9]+: 66 8c 00 data16 mov %es,\(%eax\)
+ *[a-f0-9]+: 66 8e 00 data16 mov \(%eax\),%es
+ *[a-f0-9]+: 66 a5 movsw %ds:\(%esi\),%es:\(%edi\)
+ *[a-f0-9]+: 66 a5 movsw %ds:\(%esi\),%es:\(%edi\)
+ *[a-f0-9]+: 66 0f be 00 movsbw \(%eax\),%ax
+ *[a-f0-9]+: 66 0f be 00 movsbw \(%eax\),%ax
+ *[a-f0-9]+: 66 0f b6 00 movzbw \(%eax\),%ax
+ *[a-f0-9]+: 66 0f b6 00 movzbw \(%eax\),%ax
+ *[a-f0-9]+: 66 f7 20 mulw \(%eax\)
+ *[a-f0-9]+: 66 f7 18 negw \(%eax\)
+ *[a-f0-9]+: 66 0f 1f 00 nopw \(%eax\)
+ *[a-f0-9]+: 66 f7 10 notw \(%eax\)
+ *[a-f0-9]+: 66 83 08 01 orw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 81 08 89 00 orw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 81 08 34 12 orw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 81 08 78 56 orw \$0x5678,\(%eax\)
+ *[a-f0-9]+: 66 e7 00 out %ax,\$0x0
+ *[a-f0-9]+: 66 ef out %ax,\(%dx\)
+ *[a-f0-9]+: 66 6f outsw %ds:\(%esi\),\(%dx\)
+ *[a-f0-9]+: 66 6f outsw %ds:\(%esi\),\(%dx\)
+ *[a-f0-9]+: 66 8f 00 popw \(%eax\)
+ *[a-f0-9]+: 66 07 popw %es
+ *[a-f0-9]+: f3 0f ae 20 ptwrite \(%eax\)
+ *[a-f0-9]+: 66 ff 30 pushw \(%eax\)
+ *[a-f0-9]+: 66 06 pushw %es
+ *[a-f0-9]+: 66 d1 10 rclw \(%eax\)
+ *[a-f0-9]+: 66 c1 10 02 rclw \$0x2,\(%eax\)
+ *[a-f0-9]+: 66 d3 10 rclw %cl,\(%eax\)
+ *[a-f0-9]+: 66 d1 10 rclw \(%eax\)
+ *[a-f0-9]+: 66 d1 18 rcrw \(%eax\)
+ *[a-f0-9]+: 66 c1 18 02 rcrw \$0x2,\(%eax\)
+ *[a-f0-9]+: 66 d3 18 rcrw %cl,\(%eax\)
+ *[a-f0-9]+: 66 d1 18 rcrw \(%eax\)
+ *[a-f0-9]+: 66 d1 00 rolw \(%eax\)
+ *[a-f0-9]+: 66 c1 00 02 rolw \$0x2,\(%eax\)
+ *[a-f0-9]+: 66 d3 00 rolw %cl,\(%eax\)
+ *[a-f0-9]+: 66 d1 00 rolw \(%eax\)
+ *[a-f0-9]+: 66 d1 08 rorw \(%eax\)
+ *[a-f0-9]+: 66 c1 08 02 rorw \$0x2,\(%eax\)
+ *[a-f0-9]+: 66 d3 08 rorw %cl,\(%eax\)
+ *[a-f0-9]+: 66 d1 08 rorw \(%eax\)
+ *[a-f0-9]+: 66 83 18 01 sbbw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 81 18 89 00 sbbw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 81 18 34 12 sbbw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 81 18 78 56 sbbw \$0x5678,\(%eax\)
+ *[a-f0-9]+: 66 af scas %es:\(%edi\),%ax
+ *[a-f0-9]+: 66 af scas %es:\(%edi\),%ax
+ *[a-f0-9]+: 66 d1 20 shlw \(%eax\)
+ *[a-f0-9]+: 66 c1 20 02 shlw \$0x2,\(%eax\)
+ *[a-f0-9]+: 66 d3 20 shlw %cl,\(%eax\)
+ *[a-f0-9]+: 66 d1 20 shlw \(%eax\)
+ *[a-f0-9]+: 66 d1 38 sarw \(%eax\)
+ *[a-f0-9]+: 66 c1 38 02 sarw \$0x2,\(%eax\)
+ *[a-f0-9]+: 66 d3 38 sarw %cl,\(%eax\)
+ *[a-f0-9]+: 66 d1 38 sarw \(%eax\)
+ *[a-f0-9]+: 66 d1 20 shlw \(%eax\)
+ *[a-f0-9]+: 66 c1 20 02 shlw \$0x2,\(%eax\)
+ *[a-f0-9]+: 66 d3 20 shlw %cl,\(%eax\)
+ *[a-f0-9]+: 66 d1 20 shlw \(%eax\)
+ *[a-f0-9]+: 66 d1 28 shrw \(%eax\)
+ *[a-f0-9]+: 66 c1 28 02 shrw \$0x2,\(%eax\)
+ *[a-f0-9]+: 66 d3 28 shrw %cl,\(%eax\)
+ *[a-f0-9]+: 66 d1 28 shrw \(%eax\)
+ *[a-f0-9]+: 66 ab stos %ax,%es:\(%edi\)
+ *[a-f0-9]+: 66 ab stos %ax,%es:\(%edi\)
+ *[a-f0-9]+: 66 83 28 01 subw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 81 28 89 00 subw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 81 28 34 12 subw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 81 28 78 56 subw \$0x5678,\(%eax\)
+ *[a-f0-9]+: 66 f7 00 89 00 testw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 f7 00 34 12 testw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 f7 00 78 56 testw \$0x5678,\(%eax\)
+ *[a-f0-9]+: c5 fb 2a 00 vcvtsi2sd \(%eax\),%xmm0,%xmm0
+ *[a-f0-9]+: 62 f1 7f 08 2a 00 vcvtsi2sd \(%eax\),%xmm0,%xmm0
+ *[a-f0-9]+: c5 fa 2a 00 vcvtsi2ss \(%eax\),%xmm0,%xmm0
+ *[a-f0-9]+: 62 f1 7e 08 2a 00 vcvtsi2ss \(%eax\),%xmm0,%xmm0
+ *[a-f0-9]+: 62 f1 7f 08 7b 00 vcvtusi2sd \(%eax\),%xmm0,%xmm0
+ *[a-f0-9]+: 62 f1 7e 08 7b 00 vcvtusi2ss \(%eax\),%xmm0,%xmm0
+ *[a-f0-9]+: 66 83 30 01 xorw \$0x1,\(%eax\)
+ *[a-f0-9]+: 66 81 30 89 00 xorw \$0x89,\(%eax\)
+ *[a-f0-9]+: 66 81 30 34 12 xorw \$0x1234,\(%eax\)
+ *[a-f0-9]+: 66 81 30 78 56 xorw \$0x5678,\(%eax\)
+#pass
--- /dev/null
+.*: Assembler messages:
+.*:14: Warning: .*shortened.*
+.*:18: Warning: .*shortened.*
+.*:22: Warning: .*shortened.*
+.*:31: Warning: .*shortened.*
+.*:79: Warning: .*shortened.*
+.*:95: Warning: .*shortened.*
+.*:124: Warning: .*shortened.*
+.*:148: Warning: .*shortened.*
+.*:151: Warning: .*shortened.*
+.*:161: Warning: .*shortened.*
+ .macro pfx insn:vararg
+ .ifdef DATA16
+ data16 \insn
+ .else
+ \insn
+ .endif
+ .endm
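+ # DATA16 is defined via "--defsym DATA16=1" when running the data16
+ # variant of this test; plain runs leave \insn unprefixed.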
+
.text
noreg:
- adc $1, (%eax)
- adc $0x89, (%eax)
- adc $0x1234, (%eax)
- adc $0x12345678, (%eax)
- add $1, (%eax)
- add $0x89, (%eax)
- add $0x1234, (%eax)
- add $0x12345678, (%eax)
- and $1, (%eax)
- and $0x89, (%eax)
- and $0x1234, (%eax)
- and $0x12345678, (%eax)
- bt $1, (%eax)
- btc $1, (%eax)
- btr $1, (%eax)
- bts $1, (%eax)
- call *(%eax)
- cmp $1, (%eax)
- cmp $0x89, (%eax)
- cmp $0x1234, (%eax)
- cmp $0x12345678, (%eax)
- cmps
- cmps %es:(%edi), (%esi)
- crc32 (%eax), %eax
- cvtsi2sd (%eax), %xmm0
- cvtsi2ss (%eax), %xmm0
- dec (%eax)
- div (%eax)
- fadd (%eax)
- fcom (%eax)
- fcomp (%eax)
- fdiv (%eax)
- fdivr (%eax)
- fiadd (%eax)
- ficom (%eax)
- ficomp (%eax)
- fidiv (%eax)
- fidivr (%eax)
- fild (%eax)
- fimul (%eax)
- fist (%eax)
- fistp (%eax)
- fisttp (%eax)
- fisub (%eax)
- fisubr (%eax)
- fld (%eax)
- fmul (%eax)
- fst (%eax)
- fstp (%eax)
- fsub (%eax)
- fsubr (%eax)
- idiv (%eax)
- imul (%eax)
- in $0
- in %dx
- inc (%eax)
- ins
- ins %dx, %es:(%edi)
- jmp *(%eax)
- lgdt (%eax)
- lidt (%eax)
- lldt (%eax)
- lmsw (%eax)
- lods
- lods (%esi)
- ltr (%eax)
- mov $0x12, (%eax)
- mov $0x1234, (%eax)
- mov $0x12345678, (%eax)
- mov %es, (%eax)
- mov (%eax), %es
- movs
- movs (%esi), %es:(%edi)
- movsx (%eax), %ax
- movsx (%eax), %eax
- movzx (%eax), %ax
- movzx (%eax), %eax
- mul (%eax)
- neg (%eax)
- nop (%eax)
- not (%eax)
- or $1, (%eax)
- or $0x89, (%eax)
- or $0x1234, (%eax)
- or $0x12345678, (%eax)
- out $0
- out %dx
- outs
- outs (%esi), %dx
- pop (%eax)
- pop %es
- ptwrite (%eax)
- push (%eax)
- push %es
- rcl $1, (%eax)
- rcl $2, (%eax)
- rcl %cl, (%eax)
- rcl (%eax)
- rcr $1, (%eax)
- rcr $2, (%eax)
- rcr %cl, (%eax)
- rcr (%eax)
- rol $1, (%eax)
- rol $2, (%eax)
- rol %cl, (%eax)
- rol (%eax)
- ror $1, (%eax)
- ror $2, (%eax)
- ror %cl, (%eax)
- ror (%eax)
- sbb $1, (%eax)
- sbb $0x89, (%eax)
- sbb $0x1234, (%eax)
- sbb $0x12345678, (%eax)
- scas
- scas %es:(%edi)
- sal $1, (%eax)
- sal $2, (%eax)
- sal %cl, (%eax)
- sal (%eax)
- sar $1, (%eax)
- sar $2, (%eax)
- sar %cl, (%eax)
- sar (%eax)
- shl $1, (%eax)
- shl $2, (%eax)
- shl %cl, (%eax)
- shl (%eax)
- shr $1, (%eax)
- shr $2, (%eax)
- shr %cl, (%eax)
- shr (%eax)
- stos
- stos %es:(%edi)
- sub $1, (%eax)
- sub $0x89, (%eax)
- sub $0x1234, (%eax)
- sub $0x12345678, (%eax)
- test $0x89, (%eax)
- test $0x1234, (%eax)
- test $0x12345678, (%eax)
- vcvtsi2sd (%eax), %xmm0, %xmm0
+ pfx adc $1, (%eax)
+ pfx adc $0x89, (%eax)
+ pfx adc $0x1234, (%eax)
+ pfx adc $0x12345678, (%eax)
+ pfx add $1, (%eax)
+ pfx add $0x89, (%eax)
+ pfx add $0x1234, (%eax)
+ pfx add $0x12345678, (%eax)
+ pfx and $1, (%eax)
+ pfx and $0x89, (%eax)
+ pfx and $0x1234, (%eax)
+ pfx and $0x12345678, (%eax)
+ pfx bt $1, (%eax)
+ pfx btc $1, (%eax)
+ pfx btr $1, (%eax)
+ pfx bts $1, (%eax)
+ pfx call *(%eax)
+ pfx cmp $1, (%eax)
+ pfx cmp $0x89, (%eax)
+ pfx cmp $0x1234, (%eax)
+ pfx cmp $0x12345678, (%eax)
+ pfx cmps
+ pfx cmps %es:(%edi), (%esi)
+ pfx crc32 (%eax), %eax
+ cvtsi2sd (%eax), %xmm0
+ cvtsi2ss (%eax), %xmm0
+ pfx dec (%eax)
+ pfx div (%eax)
+ pfx fadd (%eax)
+ pfx fcom (%eax)
+ pfx fcomp (%eax)
+ pfx fdiv (%eax)
+ pfx fdivr (%eax)
+ pfx fiadd (%eax)
+ pfx ficom (%eax)
+ pfx ficomp (%eax)
+ pfx fidiv (%eax)
+ pfx fidivr (%eax)
+ pfx fild (%eax)
+ pfx fimul (%eax)
+ pfx fist (%eax)
+ pfx fistp (%eax)
+ pfx fisttp (%eax)
+ pfx fisub (%eax)
+ pfx fisubr (%eax)
+ pfx fld (%eax)
+ pfx fmul (%eax)
+ pfx fst (%eax)
+ pfx fstp (%eax)
+ pfx fsub (%eax)
+ pfx fsubr (%eax)
+ pfx idiv (%eax)
+ pfx imul (%eax)
+ pfx in $0
+ pfx in %dx
+ pfx inc (%eax)
+ pfx ins
+ pfx ins %dx, %es:(%edi)
+ pfx jmp *(%eax)
+ pfx lgdt (%eax)
+ pfx lidt (%eax)
+ pfx lldt (%eax)
+ pfx lmsw (%eax)
+ pfx lods
+ pfx lods (%esi)
+ pfx ltr (%eax)
+ pfx mov $0x12, (%eax)
+ pfx mov $0x1234, (%eax)
+ pfx mov $0x12345678, (%eax)
+ pfx mov %es, (%eax)
+ pfx mov (%eax), %es
+ pfx movs
+ pfx movs (%esi), %es:(%edi)
+ movsx (%eax), %ax
+ pfx movsx (%eax), %eax
+ movzx (%eax), %ax
+ pfx movzx (%eax), %eax
+ pfx mul (%eax)
+ pfx neg (%eax)
+ pfx nop (%eax)
+ pfx not (%eax)
+ pfx or $1, (%eax)
+ pfx or $0x89, (%eax)
+ pfx or $0x1234, (%eax)
+ pfx or $0x12345678, (%eax)
+ pfx out $0
+ pfx out %dx
+ pfx outs
+ pfx outs (%esi), %dx
+ pfx pop (%eax)
+ pfx pop %es
+ ptwrite (%eax)
+ pfx push (%eax)
+ pfx push %es
+ pfx rcl $1, (%eax)
+ pfx rcl $2, (%eax)
+ pfx rcl %cl, (%eax)
+ pfx rcl (%eax)
+ pfx rcr $1, (%eax)
+ pfx rcr $2, (%eax)
+ pfx rcr %cl, (%eax)
+ pfx rcr (%eax)
+ pfx rol $1, (%eax)
+ pfx rol $2, (%eax)
+ pfx rol %cl, (%eax)
+ pfx rol (%eax)
+ pfx ror $1, (%eax)
+ pfx ror $2, (%eax)
+ pfx ror %cl, (%eax)
+ pfx ror (%eax)
+ pfx sbb $1, (%eax)
+ pfx sbb $0x89, (%eax)
+ pfx sbb $0x1234, (%eax)
+ pfx sbb $0x12345678, (%eax)
+ pfx scas
+ pfx scas %es:(%edi)
+ pfx sal $1, (%eax)
+ pfx sal $2, (%eax)
+ pfx sal %cl, (%eax)
+ pfx sal (%eax)
+ pfx sar $1, (%eax)
+ pfx sar $2, (%eax)
+ pfx sar %cl, (%eax)
+ pfx sar (%eax)
+ pfx shl $1, (%eax)
+ pfx shl $2, (%eax)
+ pfx shl %cl, (%eax)
+ pfx shl (%eax)
+ pfx shr $1, (%eax)
+ pfx shr $2, (%eax)
+ pfx shr %cl, (%eax)
+ pfx shr (%eax)
+ pfx stos
+ pfx stos %es:(%edi)
+ pfx sub $1, (%eax)
+ pfx sub $0x89, (%eax)
+ pfx sub $0x1234, (%eax)
+ pfx sub $0x12345678, (%eax)
+ pfx test $0x89, (%eax)
+ pfx test $0x1234, (%eax)
+ pfx test $0x12345678, (%eax)
+ vcvtsi2sd (%eax), %xmm0, %xmm0
{evex} vcvtsi2sd (%eax), %xmm0, %xmm0
- vcvtsi2ss (%eax), %xmm0, %xmm0
+ vcvtsi2ss (%eax), %xmm0, %xmm0
{evex} vcvtsi2ss (%eax), %xmm0, %xmm0
- vcvtusi2sd (%eax), %xmm0, %xmm0
- vcvtusi2ss (%eax), %xmm0, %xmm0
- xor $1, (%eax)
- xor $0x89, (%eax)
- xor $0x1234, (%eax)
- xor $0x12345678, (%eax)
+ vcvtusi2sd (%eax), %xmm0, %xmm0
+ vcvtusi2ss (%eax), %xmm0, %xmm0
+ pfx xor $1, (%eax)
+ pfx xor $0x89, (%eax)
+ pfx xor $0x1234, (%eax)
+ pfx xor $0x12345678, (%eax)
--- /dev/null
+#as: --defsym DATA16=1
+#objdump: -dw
+#name: 64-bit insns not sizeable through register operands w/ data16
+#source: noreg64.s
+#warning_output: noreg64-data16.e
+
+.*: +file format .*
+
+Disassembly of section .text:
+
+0+ <noreg>:
+ *[a-f0-9]+: 66 83 10 01 adcw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 81 10 89 00 adcw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 81 10 34 12 adcw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 81 10 78 56 adcw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 83 00 01 addw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 81 00 89 00 addw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 81 00 34 12 addw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 81 00 78 56 addw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 83 20 01 andw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 81 20 89 00 andw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 81 20 34 12 andw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 81 20 78 56 andw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 0f ba 20 01 btw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 0f ba 38 01 btcw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 0f ba 30 01 btrw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 0f ba 28 01 btsw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 ff 10 callw \*\(%rax\)
+ *[a-f0-9]+: 66 83 38 01 cmpw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 81 38 89 00 cmpw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 81 38 34 12 cmpw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 81 38 78 56 cmpw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 a7 cmpsw %es:\(%rdi\),%ds:\(%rsi\)
+ *[a-f0-9]+: 66 a7 cmpsw %es:\(%rdi\),%ds:\(%rsi\)
+ *[a-f0-9]+: 66 f2 0f 38 f1 00 crc32w \(%rax\),%eax
+ *[a-f0-9]+: 66 f2 48 0f 38 f1 00 data16 crc32q \(%rax\),%rax
+ *[a-f0-9]+: 66 ff 08 decw \(%rax\)
+ *[a-f0-9]+: 66 f7 30 divw \(%rax\)
+ *[a-f0-9]+: 66 d8 00 data16 fadds \(%rax\)
+ *[a-f0-9]+: 66 d8 10 data16 fcoms \(%rax\)
+ *[a-f0-9]+: 66 d8 18 data16 fcomps \(%rax\)
+ *[a-f0-9]+: 66 d8 30 data16 fdivs \(%rax\)
+ *[a-f0-9]+: 66 d8 38 data16 fdivrs \(%rax\)
+ *[a-f0-9]+: 66 de 00 data16 fiadds \(%rax\)
+ *[a-f0-9]+: 66 de 10 data16 ficoms \(%rax\)
+ *[a-f0-9]+: 66 de 18 data16 ficomps \(%rax\)
+ *[a-f0-9]+: 66 de 30 data16 fidivs \(%rax\)
+ *[a-f0-9]+: 66 de 38 data16 fidivrs \(%rax\)
+ *[a-f0-9]+: 66 df 00 data16 filds \(%rax\)
+ *[a-f0-9]+: 66 de 08 data16 fimuls \(%rax\)
+ *[a-f0-9]+: 66 df 10 data16 fists \(%rax\)
+ *[a-f0-9]+: 66 df 18 data16 fistps \(%rax\)
+ *[a-f0-9]+: 66 df 08 data16 fisttps \(%rax\)
+ *[a-f0-9]+: 66 de 20 data16 fisubs \(%rax\)
+ *[a-f0-9]+: 66 de 28 data16 fisubrs \(%rax\)
+ *[a-f0-9]+: 66 d9 00 data16 flds \(%rax\)
+ *[a-f0-9]+: 66 d8 08 data16 fmuls \(%rax\)
+ *[a-f0-9]+: 66 d9 10 data16 fsts \(%rax\)
+ *[a-f0-9]+: 66 d9 18 data16 fstps \(%rax\)
+ *[a-f0-9]+: 66 d8 20 data16 fsubs \(%rax\)
+ *[a-f0-9]+: 66 d8 28 data16 fsubrs \(%rax\)
+ *[a-f0-9]+: 66 f7 38 idivw \(%rax\)
+ *[a-f0-9]+: 66 f7 28 imulw \(%rax\)
+ *[a-f0-9]+: 66 e5 00 in \$0x0,%ax
+ *[a-f0-9]+: 66 ed in \(%dx\),%ax
+ *[a-f0-9]+: 66 ff 00 incw \(%rax\)
+ *[a-f0-9]+: 66 6d insw \(%dx\),%es:\(%rdi\)
+ *[a-f0-9]+: 66 6d insw \(%dx\),%es:\(%rdi\)
+ *[a-f0-9]+: 66 cf iretw *
+ *[a-f0-9]+: 66 ff 20 jmpw \*\(%rax\)
+ *[a-f0-9]+: 66 ff 18 lcallw \*\(%rax\)
+ *[a-f0-9]+: 66 0f 01 10 data16 lgdt \(%rax\)
+ *[a-f0-9]+: 66 0f 01 18 data16 lidt \(%rax\)
+ *[a-f0-9]+: 66 ff 28 ljmpw \*\(%rax\)
+ *[a-f0-9]+: 66 0f 00 10 data16 lldt \(%rax\)
+ *[a-f0-9]+: 66 0f 01 30 data16 lmsw \(%rax\)
+ *[a-f0-9]+: 66 ad lods %ds:\(%rsi\),%ax
+ *[a-f0-9]+: 66 ad lods %ds:\(%rsi\),%ax
+ *[a-f0-9]+: 66 cb lretw *
+ *[a-f0-9]+: 66 ca 04 00 lretw \$0x4
+ *[a-f0-9]+: 66 0f 00 18 data16 ltr \(%rax\)
+ *[a-f0-9]+: 66 c7 00 12 00 movw \$0x12,\(%rax\)
+ *[a-f0-9]+: 66 c7 00 34 12 movw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 c7 00 78 56 movw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 8c 00 data16 mov %es,\(%rax\)
+ *[a-f0-9]+: 66 8e 00 data16 mov \(%rax\),%es
+ *[a-f0-9]+: 66 a5 movsw %ds:\(%rsi\),%es:\(%rdi\)
+ *[a-f0-9]+: 66 a5 movsw %ds:\(%rsi\),%es:\(%rdi\)
+ *[a-f0-9]+: 66 0f be 00 movsbw \(%rax\),%ax
+ *[a-f0-9]+: 66 48 0f be 00 data16 movsbq \(%rax\),%rax
+ *[a-f0-9]+: 66 0f b6 00 movzbw \(%rax\),%ax
+ *[a-f0-9]+: 66 48 0f b6 00 data16 movzbq \(%rax\),%rax
+ *[a-f0-9]+: 66 f7 20 mulw \(%rax\)
+ *[a-f0-9]+: 66 f7 18 negw \(%rax\)
+ *[a-f0-9]+: 66 0f 1f 00 nopw \(%rax\)
+ *[a-f0-9]+: 66 f7 10 notw \(%rax\)
+ *[a-f0-9]+: 66 83 08 01 orw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 81 08 89 00 orw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 81 08 34 12 orw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 81 08 78 56 orw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 e7 00 out %ax,\$0x0
+ *[a-f0-9]+: 66 ef out %ax,\(%dx\)
+ *[a-f0-9]+: 66 6f outsw %ds:\(%rsi\),\(%dx\)
+ *[a-f0-9]+: 66 6f outsw %ds:\(%rsi\),\(%dx\)
+ *[a-f0-9]+: 66 8f 00 popw \(%rax\)
+ *[a-f0-9]+: 66 0f a1 popw %fs
+ *[a-f0-9]+: 66 ff 30 pushw \(%rax\)
+ *[a-f0-9]+: 66 0f a0 pushw %fs
+ *[a-f0-9]+: 66 d1 10 rclw \(%rax\)
+ *[a-f0-9]+: 66 c1 10 02 rclw \$0x2,\(%rax\)
+ *[a-f0-9]+: 66 d3 10 rclw %cl,\(%rax\)
+ *[a-f0-9]+: 66 d1 10 rclw \(%rax\)
+ *[a-f0-9]+: 66 d1 18 rcrw \(%rax\)
+ *[a-f0-9]+: 66 c1 18 02 rcrw \$0x2,\(%rax\)
+ *[a-f0-9]+: 66 d3 18 rcrw %cl,\(%rax\)
+ *[a-f0-9]+: 66 d1 18 rcrw \(%rax\)
+ *[a-f0-9]+: 66 d1 00 rolw \(%rax\)
+ *[a-f0-9]+: 66 c1 00 02 rolw \$0x2,\(%rax\)
+ *[a-f0-9]+: 66 d3 00 rolw %cl,\(%rax\)
+ *[a-f0-9]+: 66 d1 00 rolw \(%rax\)
+ *[a-f0-9]+: 66 d1 08 rorw \(%rax\)
+ *[a-f0-9]+: 66 c1 08 02 rorw \$0x2,\(%rax\)
+ *[a-f0-9]+: 66 d3 08 rorw %cl,\(%rax\)
+ *[a-f0-9]+: 66 d1 08 rorw \(%rax\)
+ *[a-f0-9]+: 66 83 18 01 sbbw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 81 18 89 00 sbbw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 81 18 34 12 sbbw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 81 18 78 56 sbbw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 af scas %es:\(%rdi\),%ax
+ *[a-f0-9]+: 66 af scas %es:\(%rdi\),%ax
+ *[a-f0-9]+: 66 d1 20 shlw \(%rax\)
+ *[a-f0-9]+: 66 c1 20 02 shlw \$0x2,\(%rax\)
+ *[a-f0-9]+: 66 d3 20 shlw %cl,\(%rax\)
+ *[a-f0-9]+: 66 d1 20 shlw \(%rax\)
+ *[a-f0-9]+: 66 d1 38 sarw \(%rax\)
+ *[a-f0-9]+: 66 c1 38 02 sarw \$0x2,\(%rax\)
+ *[a-f0-9]+: 66 d3 38 sarw %cl,\(%rax\)
+ *[a-f0-9]+: 66 d1 38 sarw \(%rax\)
+ *[a-f0-9]+: 66 d1 20 shlw \(%rax\)
+ *[a-f0-9]+: 66 c1 20 02 shlw \$0x2,\(%rax\)
+ *[a-f0-9]+: 66 d3 20 shlw %cl,\(%rax\)
+ *[a-f0-9]+: 66 d1 20 shlw \(%rax\)
+ *[a-f0-9]+: 66 d1 28 shrw \(%rax\)
+ *[a-f0-9]+: 66 c1 28 02 shrw \$0x2,\(%rax\)
+ *[a-f0-9]+: 66 d3 28 shrw %cl,\(%rax\)
+ *[a-f0-9]+: 66 d1 28 shrw \(%rax\)
+ *[a-f0-9]+: 66 ab stos %ax,%es:\(%rdi\)
+ *[a-f0-9]+: 66 ab stos %ax,%es:\(%rdi\)
+ *[a-f0-9]+: 66 83 28 01 subw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 81 28 89 00 subw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 81 28 34 12 subw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 81 28 78 56 subw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 0f 07 data16 sysret *
+ *[a-f0-9]+: 66 f7 00 89 00 testw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 f7 00 34 12 testw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 f7 00 78 56 testw \$0x5678,\(%rax\)
+ *[a-f0-9]+: 66 83 30 01 xorw \$0x1,\(%rax\)
+ *[a-f0-9]+: 66 81 30 89 00 xorw \$0x89,\(%rax\)
+ *[a-f0-9]+: 66 81 30 34 12 xorw \$0x1234,\(%rax\)
+ *[a-f0-9]+: 66 81 30 78 56 xorw \$0x5678,\(%rax\)
+#pass
--- /dev/null
+.*: Assembler messages:
+.*:30: Warning: .*shortened.*
+.*:34: Warning: .*shortened.*
+.*:38: Warning: .*shortened.*
+.*:47: Warning: .*shortened.*
+.*:99: Warning: .*shortened.*
+.*:117: Warning: .*shortened.*
+.*:146: Warning: .*shortened.*
+.*:170: Warning: .*shortened.*
+.*:174: Warning: .*shortened.*
+.*:178: Warning: .*shortened.*
--- /dev/null
+#as: --defsym REX64=1
+#objdump: -dw
+#name: 64-bit insns not sizeable through register operands w/ rex64
+#source: noreg64.s
+
+.*: +file format .*
+
+Disassembly of section .text:
+
+0+ <noreg>:
+ *[a-f0-9]+: 48 83 10 01 adcq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 81 10 89 00 00 00 adcq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 81 10 34 12 00 00 adcq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 81 10 78 56 34 12 adcq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 83 00 01 addq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 81 00 89 00 00 00 addq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 81 00 34 12 00 00 addq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 81 00 78 56 34 12 addq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 83 20 01 andq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 81 20 89 00 00 00 andq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 81 20 34 12 00 00 andq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 81 20 78 56 34 12 andq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 0f ba 20 01 btq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 0f ba 38 01 btcq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 0f ba 30 01 btrq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 0f ba 28 01 btsq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 ff 10 rex\.W callq \*\(%rax\)
+ *[a-f0-9]+: 48 83 38 01 cmpq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 81 38 89 00 00 00 cmpq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 81 38 34 12 00 00 cmpq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 81 38 78 56 34 12 cmpq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 a7 cmpsq %es:\(%rdi\),%ds:\(%rsi\)
+ *[a-f0-9]+: 48 a7 cmpsq %es:\(%rdi\),%ds:\(%rsi\)
+ *[a-f0-9]+: f2 48 0f 38 f1 00 crc32q \(%rax\),%rax
+ *[a-f0-9]+: 48 ff 08 decq \(%rax\)
+ *[a-f0-9]+: 48 f7 30 divq \(%rax\)
+ *[a-f0-9]+: 48 d8 00 rex\.W fadds \(%rax\)
+ *[a-f0-9]+: 48 d8 10 rex\.W fcoms \(%rax\)
+ *[a-f0-9]+: 48 d8 18 rex\.W fcomps \(%rax\)
+ *[a-f0-9]+: 48 d8 30 rex\.W fdivs \(%rax\)
+ *[a-f0-9]+: 48 d8 38 rex\.W fdivrs \(%rax\)
+ *[a-f0-9]+: 48 de 00 rex\.W fiadds \(%rax\)
+ *[a-f0-9]+: 48 de 10 rex\.W ficoms \(%rax\)
+ *[a-f0-9]+: 48 de 18 rex\.W ficomps \(%rax\)
+ *[a-f0-9]+: 48 de 30 rex\.W fidivs \(%rax\)
+ *[a-f0-9]+: 48 de 38 rex\.W fidivrs \(%rax\)
+ *[a-f0-9]+: 48 df 00 rex\.W filds \(%rax\)
+ *[a-f0-9]+: 48 de 08 rex\.W fimuls \(%rax\)
+ *[a-f0-9]+: 48 df 10 rex\.W fists \(%rax\)
+ *[a-f0-9]+: 48 df 18 rex\.W fistps \(%rax\)
+ *[a-f0-9]+: 48 df 08 rex\.W fisttps \(%rax\)
+ *[a-f0-9]+: 48 de 20 rex\.W fisubs \(%rax\)
+ *[a-f0-9]+: 48 de 28 rex\.W fisubrs \(%rax\)
+ *[a-f0-9]+: 48 d9 00 rex\.W flds \(%rax\)
+ *[a-f0-9]+: 48 d8 08 rex\.W fmuls \(%rax\)
+ *[a-f0-9]+: 48 d9 10 rex\.W fsts \(%rax\)
+ *[a-f0-9]+: 48 d9 18 rex\.W fstps \(%rax\)
+ *[a-f0-9]+: 48 d8 20 rex\.W fsubs \(%rax\)
+ *[a-f0-9]+: 48 d8 28 rex\.W fsubrs \(%rax\)
+ *[a-f0-9]+: 48 f7 38 idivq \(%rax\)
+ *[a-f0-9]+: 48 f7 28 imulq \(%rax\)
+ *[a-f0-9]+: 48 e5 00 rex\.W in \$0x0,%eax
+ *[a-f0-9]+: 48 ed rex\.W in \(%dx\),%eax
+ *[a-f0-9]+: 48 ff 00 incq \(%rax\)
+ *[a-f0-9]+: 48 6d rex\.W insl \(%dx\),%es:\(%rdi\)
+ *[a-f0-9]+: 48 6d rex\.W insl \(%dx\),%es:\(%rdi\)
+ *[a-f0-9]+: 48 cf iretq *
+ *[a-f0-9]+: 48 ff 20 rex\.W jmpq \*\(%rax\)
+ *[a-f0-9]+: 48 ff 18 rex\.W lcall \*\(%rax\)
+ *[a-f0-9]+: 48 0f 01 10 rex\.W lgdt \(%rax\)
+ *[a-f0-9]+: 48 0f 01 18 rex\.W lidt \(%rax\)
+ *[a-f0-9]+: 48 ff 28 rex\.W ljmp \*\(%rax\)
+ *[a-f0-9]+: 48 0f 00 10 rex\.W lldt \(%rax\)
+ *[a-f0-9]+: 48 0f 01 30 rex\.W lmsw \(%rax\)
+ *[a-f0-9]+: 48 ad lods %ds:\(%rsi\),%rax
+ *[a-f0-9]+: 48 ad lods %ds:\(%rsi\),%rax
+ *[a-f0-9]+: 48 cb lretq *
+ *[a-f0-9]+: 48 ca 04 00 lretq \$0x4
+ *[a-f0-9]+: 48 0f 00 18 rex\.W ltr \(%rax\)
+ *[a-f0-9]+: 48 c7 00 12 00 00 00 movq \$0x12,\(%rax\)
+ *[a-f0-9]+: 48 c7 00 34 12 00 00 movq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 c7 00 78 56 34 12 movq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 8c 00 rex\.W mov %es,\(%rax\)
+ *[a-f0-9]+: 48 8e 00 rex\.W mov \(%rax\),%es
+ *[a-f0-9]+: 48 a5 movsq %ds:\(%rsi\),%es:\(%rdi\)
+ *[a-f0-9]+: 48 a5 movsq %ds:\(%rsi\),%es:\(%rdi\)
+ *[a-f0-9]+: 66 48 0f be 00 data16 movsbq \(%rax\),%rax
+ *[a-f0-9]+: 48 0f be 00 movsbq \(%rax\),%rax
+ *[a-f0-9]+: 66 48 0f b6 00 data16 movzbq \(%rax\),%rax
+ *[a-f0-9]+: 48 0f b6 00 movzbq \(%rax\),%rax
+ *[a-f0-9]+: 48 f7 20 mulq \(%rax\)
+ *[a-f0-9]+: 48 f7 18 negq \(%rax\)
+ *[a-f0-9]+: 48 0f 1f 00 nopq \(%rax\)
+ *[a-f0-9]+: 48 f7 10 notq \(%rax\)
+ *[a-f0-9]+: 48 83 08 01 orq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 81 08 89 00 00 00 orq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 81 08 34 12 00 00 orq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 81 08 78 56 34 12 orq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 e7 00 rex\.W out %eax,\$0x0
+ *[a-f0-9]+: 48 ef rex\.W out %eax,\(%dx\)
+ *[a-f0-9]+: 48 6f rex\.W outsl %ds:\(%rsi\),\(%dx\)
+ *[a-f0-9]+: 48 6f rex\.W outsl %ds:\(%rsi\),\(%dx\)
+ *[a-f0-9]+: 48 8f 00 rex\.W popq \(%rax\)
+ *[a-f0-9]+: 48 0f a1 rex\.W popq %fs
+ *[a-f0-9]+: f3 48 0f ae 20 ptwriteq \(%rax\)
+ *[a-f0-9]+: 48 ff 30 rex\.W pushq \(%rax\)
+ *[a-f0-9]+: 48 0f a0 rex\.W pushq %fs
+ *[a-f0-9]+: 48 d1 10 rclq \(%rax\)
+ *[a-f0-9]+: 48 c1 10 02 rclq \$0x2,\(%rax\)
+ *[a-f0-9]+: 48 d3 10 rclq %cl,\(%rax\)
+ *[a-f0-9]+: 48 d1 10 rclq \(%rax\)
+ *[a-f0-9]+: 48 d1 18 rcrq \(%rax\)
+ *[a-f0-9]+: 48 c1 18 02 rcrq \$0x2,\(%rax\)
+ *[a-f0-9]+: 48 d3 18 rcrq %cl,\(%rax\)
+ *[a-f0-9]+: 48 d1 18 rcrq \(%rax\)
+ *[a-f0-9]+: 48 d1 00 rolq \(%rax\)
+ *[a-f0-9]+: 48 c1 00 02 rolq \$0x2,\(%rax\)
+ *[a-f0-9]+: 48 d3 00 rolq %cl,\(%rax\)
+ *[a-f0-9]+: 48 d1 00 rolq \(%rax\)
+ *[a-f0-9]+: 48 d1 08 rorq \(%rax\)
+ *[a-f0-9]+: 48 c1 08 02 rorq \$0x2,\(%rax\)
+ *[a-f0-9]+: 48 d3 08 rorq %cl,\(%rax\)
+ *[a-f0-9]+: 48 d1 08 rorq \(%rax\)
+ *[a-f0-9]+: 48 83 18 01 sbbq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 81 18 89 00 00 00 sbbq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 81 18 34 12 00 00 sbbq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 81 18 78 56 34 12 sbbq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 af scas %es:\(%rdi\),%rax
+ *[a-f0-9]+: 48 af scas %es:\(%rdi\),%rax
+ *[a-f0-9]+: 48 d1 20 shlq \(%rax\)
+ *[a-f0-9]+: 48 c1 20 02 shlq \$0x2,\(%rax\)
+ *[a-f0-9]+: 48 d3 20 shlq %cl,\(%rax\)
+ *[a-f0-9]+: 48 d1 20 shlq \(%rax\)
+ *[a-f0-9]+: 48 d1 38 sarq \(%rax\)
+ *[a-f0-9]+: 48 c1 38 02 sarq \$0x2,\(%rax\)
+ *[a-f0-9]+: 48 d3 38 sarq %cl,\(%rax\)
+ *[a-f0-9]+: 48 d1 38 sarq \(%rax\)
+ *[a-f0-9]+: 48 d1 20 shlq \(%rax\)
+ *[a-f0-9]+: 48 c1 20 02 shlq \$0x2,\(%rax\)
+ *[a-f0-9]+: 48 d3 20 shlq %cl,\(%rax\)
+ *[a-f0-9]+: 48 d1 20 shlq \(%rax\)
+ *[a-f0-9]+: 48 d1 28 shrq \(%rax\)
+ *[a-f0-9]+: 48 c1 28 02 shrq \$0x2,\(%rax\)
+ *[a-f0-9]+: 48 d3 28 shrq %cl,\(%rax\)
+ *[a-f0-9]+: 48 d1 28 shrq \(%rax\)
+ *[a-f0-9]+: 48 ab stos %rax,%es:\(%rdi\)
+ *[a-f0-9]+: 48 ab stos %rax,%es:\(%rdi\)
+ *[a-f0-9]+: 48 83 28 01 subq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 81 28 89 00 00 00 subq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 81 28 34 12 00 00 subq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 81 28 78 56 34 12 subq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 0f 07 sysretq *
+ *[a-f0-9]+: 48 f7 00 89 00 00 00 testq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 f7 00 34 12 00 00 testq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 f7 00 78 56 34 12 testq \$0x12345678,\(%rax\)
+ *[a-f0-9]+: 48 83 30 01 xorq \$0x1,\(%rax\)
+ *[a-f0-9]+: 48 81 30 89 00 00 00 xorq \$0x89,\(%rax\)
+ *[a-f0-9]+: 48 81 30 34 12 00 00 xorq \$0x1234,\(%rax\)
+ *[a-f0-9]+: 48 81 30 78 56 34 12 xorq \$0x12345678,\(%rax\)
+#pass
+ .macro pfx insn:vararg
+ .ifdef DATA16
+ data16 \insn
+ .else
+ .ifdef REX64
+ rex64 \insn
+ .else
+ \insn
+ .endif
+ .endif
+ .endm
+
+ .macro pfx16 insn:vararg
+ .ifndef REX64
+ pfx \insn
+ .endif
+ .endm
+
+ .macro pfx64 insn:vararg
+ .ifndef DATA16
+ pfx \insn
+ .endif
+ .endm
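+ # pfx16 emits its insn only when REX64 is undefined, and pfx64 only
+ # when DATA16 is undefined, omitting forms the respective prefix
+ # isn't meant to exercise.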
+
.text
noreg:
- adc $1, (%rax)
- adc $0x89, (%rax)
- adc $0x1234, (%rax)
- adc $0x12345678, (%rax)
- add $1, (%rax)
- add $0x89, (%rax)
- add $0x1234, (%rax)
- add $0x12345678, (%rax)
- and $1, (%rax)
- and $0x89, (%rax)
- and $0x1234, (%rax)
- and $0x12345678, (%rax)
- bt $1, (%rax)
- btc $1, (%rax)
- btr $1, (%rax)
- bts $1, (%rax)
- call *(%rax)
- cmp $1, (%rax)
- cmp $0x89, (%rax)
- cmp $0x1234, (%rax)
- cmp $0x12345678, (%rax)
- cmps
- cmps %es:(%rdi), (%rsi)
- crc32 (%rax), %eax
- crc32 (%rax), %rax
- dec (%rax)
- div (%rax)
- fadd (%rax)
- fcom (%rax)
- fcomp (%rax)
- fdiv (%rax)
- fdivr (%rax)
- fiadd (%rax)
- ficom (%rax)
- ficomp (%rax)
- fidiv (%rax)
- fidivr (%rax)
- fild (%rax)
- fimul (%rax)
- fist (%rax)
- fistp (%rax)
- fisttp (%rax)
- fisub (%rax)
- fisubr (%rax)
- fld (%rax)
- fmul (%rax)
- fst (%rax)
- fstp (%rax)
- fsub (%rax)
- fsubr (%rax)
- idiv (%rax)
- imul (%rax)
- in $0
- in %dx
- inc (%rax)
- ins
- ins %dx, %es:(%rdi)
- iret
- jmp *(%rax)
- lcall *(%rax)
- lgdt (%rax)
- lidt (%rax)
- ljmp *(%rax)
- lldt (%rax)
- lmsw (%rax)
- lods
- lods (%rsi)
- lret
- lret $4
- ltr (%rax)
- mov $0x12, (%rax)
- mov $0x1234, (%rax)
- mov $0x12345678, (%rax)
- mov %es, (%rax)
- mov (%rax), %es
- movs
- movs (%rsi), %es:(%rdi)
- movsx (%rax), %ax
- movsx (%rax), %eax
- movsx (%rax), %rax
- movzx (%rax), %ax
- movzx (%rax), %eax
- movzx (%rax), %rax
- mul (%rax)
- neg (%rax)
- nop (%rax)
- not (%rax)
- or $1, (%rax)
- or $0x89, (%rax)
- or $0x1234, (%rax)
- or $0x12345678, (%rax)
- out $0
- out %dx
- outs
- outs (%rsi), %dx
- pop (%rax)
- pop %fs
- ptwrite (%rax)
- push (%rax)
- push %fs
- rcl $1, (%rax)
- rcl $2, (%rax)
- rcl %cl, (%rax)
- rcl (%rax)
- rcr $1, (%rax)
- rcr $2, (%rax)
- rcr %cl, (%rax)
- rcr (%rax)
- rol $1, (%rax)
- rol $2, (%rax)
- rol %cl, (%rax)
- rol (%rax)
- ror $1, (%rax)
- ror $2, (%rax)
- ror %cl, (%rax)
- ror (%rax)
- sbb $1, (%rax)
- sbb $0x89, (%rax)
- sbb $0x1234, (%rax)
- sbb $0x12345678, (%rax)
- scas
- scas %es:(%rdi)
- sal $1, (%rax)
- sal $2, (%rax)
- sal %cl, (%rax)
- sal (%rax)
- sar $1, (%rax)
- sar $2, (%rax)
- sar %cl, (%rax)
- sar (%rax)
- shl $1, (%rax)
- shl $2, (%rax)
- shl %cl, (%rax)
- shl (%rax)
- shr $1, (%rax)
- shr $2, (%rax)
- shr %cl, (%rax)
- shr (%rax)
- stos
- stos %es:(%rdi)
- sub $1, (%rax)
- sub $0x89, (%rax)
- sub $0x1234, (%rax)
- sub $0x12345678, (%rax)
- sysret
- test $0x89, (%rax)
- test $0x1234, (%rax)
- test $0x12345678, (%rax)
- xor $1, (%rax)
- xor $0x89, (%rax)
- xor $0x1234, (%rax)
- xor $0x12345678, (%rax)
+ pfx adc $1, (%rax)
+ pfx adc $0x89, (%rax)
+ pfx adc $0x1234, (%rax)
+ pfx adc $0x12345678, (%rax)
+ pfx add $1, (%rax)
+ pfx add $0x89, (%rax)
+ pfx add $0x1234, (%rax)
+ pfx add $0x12345678, (%rax)
+ pfx and $1, (%rax)
+ pfx and $0x89, (%rax)
+ pfx and $0x1234, (%rax)
+ pfx and $0x12345678, (%rax)
+ pfx bt $1, (%rax)
+ pfx btc $1, (%rax)
+ pfx btr $1, (%rax)
+ pfx bts $1, (%rax)
+ pfx call *(%rax)
+ pfx cmp $1, (%rax)
+ pfx cmp $0x89, (%rax)
+ pfx cmp $0x1234, (%rax)
+ pfx cmp $0x12345678, (%rax)
+ pfx cmps
+ pfx cmps %es:(%rdi), (%rsi)
+ pfx crc32 (%rax), %eax
+ pfx16 crc32 (%rax), %rax
+ pfx dec (%rax)
+ pfx div (%rax)
+ pfx fadd (%rax)
+ pfx fcom (%rax)
+ pfx fcomp (%rax)
+ pfx fdiv (%rax)
+ pfx fdivr (%rax)
+ pfx fiadd (%rax)
+ pfx ficom (%rax)
+ pfx ficomp (%rax)
+ pfx fidiv (%rax)
+ pfx fidivr (%rax)
+ pfx fild (%rax)
+ pfx fimul (%rax)
+ pfx fist (%rax)
+ pfx fistp (%rax)
+ pfx fisttp (%rax)
+ pfx fisub (%rax)
+ pfx fisubr (%rax)
+ pfx fld (%rax)
+ pfx fmul (%rax)
+ pfx fst (%rax)
+ pfx fstp (%rax)
+ pfx fsub (%rax)
+ pfx fsubr (%rax)
+ pfx idiv (%rax)
+ pfx imul (%rax)
+ pfx in $0
+ pfx in %dx
+ pfx inc (%rax)
+ pfx ins
+ pfx ins %dx, %es:(%rdi)
+ pfx iret
+ pfx jmp *(%rax)
+ pfx lcall *(%rax)
+ pfx lgdt (%rax)
+ pfx lidt (%rax)
+ pfx ljmp *(%rax)
+ pfx lldt (%rax)
+ pfx lmsw (%rax)
+ pfx lods
+ pfx lods (%rsi)
+ pfx lret
+ pfx lret $4
+ pfx ltr (%rax)
+ pfx mov $0x12, (%rax)
+ pfx mov $0x1234, (%rax)
+ pfx mov $0x12345678, (%rax)
+ pfx mov %es, (%rax)
+ pfx mov (%rax), %es
+ pfx movs
+ pfx movs (%rsi), %es:(%rdi)
+ pfx64 movsx (%rax), %ax
+ pfx movsx (%rax), %eax
+ pfx16 movsx (%rax), %rax
+ pfx64 movzx (%rax), %ax
+ pfx movzx (%rax), %eax
+ pfx16 movzx (%rax), %rax
+ pfx mul (%rax)
+ pfx neg (%rax)
+ pfx nop (%rax)
+ pfx not (%rax)
+ pfx or $1, (%rax)
+ pfx or $0x89, (%rax)
+ pfx or $0x1234, (%rax)
+ pfx or $0x12345678, (%rax)
+ pfx out $0
+ pfx out %dx
+ pfx outs
+ pfx outs (%rsi), %dx
+ pfx pop (%rax)
+ pfx pop %fs
+ pfx64 ptwrite (%rax)
+ pfx push (%rax)
+ pfx push %fs
+ pfx rcl $1, (%rax)
+ pfx rcl $2, (%rax)
+ pfx rcl %cl, (%rax)
+ pfx rcl (%rax)
+ pfx rcr $1, (%rax)
+ pfx rcr $2, (%rax)
+ pfx rcr %cl, (%rax)
+ pfx rcr (%rax)
+ pfx rol $1, (%rax)
+ pfx rol $2, (%rax)
+ pfx rol %cl, (%rax)
+ pfx rol (%rax)
+ pfx ror $1, (%rax)
+ pfx ror $2, (%rax)
+ pfx ror %cl, (%rax)
+ pfx ror (%rax)
+ pfx sbb $1, (%rax)
+ pfx sbb $0x89, (%rax)
+ pfx sbb $0x1234, (%rax)
+ pfx sbb $0x12345678, (%rax)
+ pfx scas
+ pfx scas %es:(%rdi)
+ pfx sal $1, (%rax)
+ pfx sal $2, (%rax)
+ pfx sal %cl, (%rax)
+ pfx sal (%rax)
+ pfx sar $1, (%rax)
+ pfx sar $2, (%rax)
+ pfx sar %cl, (%rax)
+ pfx sar (%rax)
+ pfx shl $1, (%rax)
+ pfx shl $2, (%rax)
+ pfx shl %cl, (%rax)
+ pfx shl (%rax)
+ pfx shr $1, (%rax)
+ pfx shr $2, (%rax)
+ pfx shr %cl, (%rax)
+ pfx shr (%rax)
+ pfx stos
+ pfx stos %es:(%rdi)
+ pfx sub $1, (%rax)
+ pfx sub $0x89, (%rax)
+ pfx sub $0x1234, (%rax)
+ pfx sub $0x12345678, (%rax)
+ pfx sysret
+ pfx test $0x89, (%rax)
+ pfx test $0x1234, (%rax)
+ pfx test $0x12345678, (%rax)
+ pfx xor $1, (%rax)
+ pfx xor $0x89, (%rax)
+ pfx xor $0x1234, (%rax)
+ pfx xor $0x12345678, (%rax)