/* Check for lock without a lockable instruction. Destination operand
must be memory unless it is xchg (0x86). */
- if (i.prefix[LOCK_PREFIX]
- && (i.tm.opcode_modifier.prefixok < PrefixLock
+ if (i.prefix[LOCK_PREFIX])
+ {
+ if (i.tm.opcode_modifier.prefixok < PrefixLock
|| i.mem_operands == 0
|| (i.tm.base_opcode != 0x86
- && !(i.flags[i.operands - 1] & Operand_Mem))))
- {
- as_bad (_("expecting lockable instruction after `lock'"));
- return;
+ && !(i.flags[i.operands - 1] & Operand_Mem)))
+ {
+ as_bad (_("expecting lockable instruction after `lock'"));
+ return;
+ }
+
+ /* xchg with a memory operand asserts LOCK implicitly, so an explicit LOCK prefix is redundant; zap it when optimizing. */
+ if (i.tm.base_opcode == 0x86 && optimize && !i.no_optimize)
+ i.prefix[LOCK_PREFIX] = 0;
}
if (is_any_vex_encoding (&i.tm)
+[a-f0-9]+: 08 e4 or %ah,%ah
+[a-f0-9]+: 66 09 ed or %bp,%bp
+[a-f0-9]+: 09 f6 or %esi,%esi
+ +[a-f0-9]+: 87 0a xchg %ecx,\(%edx\)
+ +[a-f0-9]+: 87 11 xchg %edx,\(%ecx\)
+[a-f0-9]+: c5 f1 55 e9 vandnpd %xmm1,%xmm1,%xmm5
+[a-f0-9]+: c5 f9 6f d1 vmovdqa %xmm1,%xmm2
+[a-f0-9]+: c5 f9 6f d1 vmovdqa %xmm1,%xmm2
or %bp, %bp
or %esi, %esi
+ lock xchg %ecx, (%edx)
+ lock xchg (%ecx), %edx
+
vandnpd %zmm1, %zmm1, %zmm5
vmovdqa32 %xmm1, %xmm2
+[a-f0-9]+: 84 e4 test %ah,%ah
+[a-f0-9]+: 66 85 ed test %bp,%bp
+[a-f0-9]+: 85 f6 test %esi,%esi
+ +[a-f0-9]+: 87 0a xchg %ecx,\(%edx\)
+ +[a-f0-9]+: 87 11 xchg %edx,\(%ecx\)
+[a-f0-9]+: c5 f1 55 e9 vandnpd %xmm1,%xmm1,%xmm5
+[a-f0-9]+: c5 f9 6f d1 vmovdqa %xmm1,%xmm2
+[a-f0-9]+: c5 f9 6f d1 vmovdqa %xmm1,%xmm2
0+ <_start>:
+[a-f0-9]+: a9 7f 00 00 00 test \$0x7f,%eax
+ +[a-f0-9]+: f0 87 0a lock xchg %ecx,\(%edx\)
+ +[a-f0-9]+: f0 87 11 lock xchg %edx,\(%ecx\)
+[a-f0-9]+: 62 f1 7d 28 6f d1 vmovdqa32 %ymm1,%ymm2
+[a-f0-9]+: 62 f1 fd 28 6f d1 vmovdqa64 %ymm1,%ymm2
+[a-f0-9]+: 62 f1 7f 08 6f d1 vmovdqu8 %xmm1,%xmm2
_start:
{nooptimize} testl $0x7f, %eax
+ {nooptimize} lock xchg %ecx, (%edx)
+ {nooptimize} lock xchg (%ecx), %edx
+
{nooptimize} vmovdqa32 %ymm1, %ymm2
{nooptimize} vmovdqa64 %ymm1, %ymm2
{nooptimize} vmovdqu8 %xmm1, %xmm2