+2011-01-16 H.J. Lu <hongjiu.lu@intel.com>
+
+ * config/tc-i386.c (disallow_64bit_disp): Renamed to ...
+ (disallow_64bit_reloc): This.
+ (md_assemble): Don't check movabs for x32 mode here.
+ (i386_target_format): Updated.
+ (tc_gen_reloc): Check if 64bit relocations are allowed.
+
2011-01-15 H.J. Lu <hongjiu.lu@intel.com>
* config/tc-i386.c (disallow_64bit_disp): New.
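
In rough terms, the x32 restriction now kicks in when relocations are
generated (tc_gen_reloc) instead of via a movabs-specific check in
md_assemble, so anything that would need one of the 64-bit relocation
types listed in the switch below is rejected at that point.  A minimal
sketch of the kind of x32 input this catches, taken from the updated
ilp32 inval test further down:

	movabs	foo, %rax	# 64-bit absolute address, needs a 64-bit relocation
	.data
	.quad	foo		# error: cannot represent relocation type BFD_RELOC_64 in x32 mode
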
static enum flag_code flag_code;
static unsigned int object_64bit;
-static unsigned int disallow_64bit_disp;
+static unsigned int disallow_64bit_reloc;
static int use_rela_relocations = 0;
#if ((defined (OBJ_MAYBE_COFF) && defined (OBJ_MAYBE_AOUT)) \
/* Don't optimize displacement for movabs since it only takes a 64bit
   displacement. */
if (i.disp_operands
- && !i.disp32_encoding)
- {
- if (flag_code == CODE_64BIT)
- {
- if (strcmp (mnemonic, "movabs") == 0)
- {
- if (disallow_64bit_disp)
- as_bad (_("'movabs' isn't supported in x32 mode"));
- }
- else
- optimize_disp ();
- }
- else
- optimize_disp ();
- }
+ && !i.disp32_encoding
+ && (flag_code != CODE_64BIT
+ || strcmp (mnemonic, "movabs") != 0))
+ optimize_disp ();
/* Next, we find a template that matches the given insn,
making sure the overlap of the given operands types is consistent
case X86_64_X32_ABI:
use_rela_relocations = 1;
object_64bit = 1;
- disallow_64bit_disp = 1;
+ disallow_64bit_reloc = 1;
format = ELF_TARGET_FORMAT32;
break;
}
/* Use the rela in 64bit mode. */
else
{
+ if (disallow_64bit_reloc)
+ switch (code)
+ {
+ case BFD_RELOC_64:
+ case BFD_RELOC_X86_64_DTPOFF64:
+ case BFD_RELOC_X86_64_TPOFF64:
+ case BFD_RELOC_64_PCREL:
+ case BFD_RELOC_X86_64_GOTOFF64:
+ case BFD_RELOC_X86_64_GOT64:
+ case BFD_RELOC_X86_64_GOTPCREL64:
+ case BFD_RELOC_X86_64_GOTPC64:
+ case BFD_RELOC_X86_64_GOTPLT64:
+ case BFD_RELOC_X86_64_PLTOFF64:
+ as_bad_where (fixp->fx_file, fixp->fx_line,
+ _("cannot represent relocation type %s in x32 mode"),
+ bfd_get_reloc_code_name (code));
+ break;
+ default:
+ break;
+ }
+
if (!fixp->fx_pcrel)
rel->addend = fixp->fx_offset;
else
+2011-01-16 H.J. Lu <hongjiu.lu@intel.com>
+
+ * gas/i386/ilp32/immed64.s: New.
+ * gas/i386/ilp32/reloc64.s: Likewise.
+ * gas/i386/ilp32/x86-64-pcrel.s: Likewise.
+
+ * gas/i386/ilp32/inval.s: Add more tests.
+
+ * gas/i386/ilp32/immed64.d: Updated.
+ * gas/i386/ilp32/inval.l: Likewise.
+ * gas/i386/ilp32/reloc64.d: Likewise.
+ * gas/i386/ilp32/x86-64-pcrel.d: Likewise.
+
2011-01-15 H.J. Lu <hongjiu.lu@intel.com>
* gas/i386/ilp32/ilp32.exp: Run inval.
-#source: ../immed64.s
#objdump: -dw
#name: x86-64 (ILP32) immed
[ ]*[0-9a-fA-F]+:[ ]+b8 00 00 00 00[ ]+movl? +\$0x0,%eax
[ ]*[0-9a-fA-F]+:[ ]+48 b8 04 00 00 00 00 00 00 00[ ]+movabsq? +\$0x4,%rax
[ ]*[0-9a-fA-F]+:[ ]+48 b8 08 00 00 00 00 00 00 00[ ]+movabsq? +\$0x8,%rax
-[ ]*[0-9a-fA-F]+:[ ]+48 b8 00 00 00 00 00 00 00 00[ ]+movabsq? +\$0x0,%rax
[ ]*[0-9a-fA-F]+:[ ]+04 04[ ]+addb? +\$0x4,%al
[ ]*[0-9a-fA-F]+:[ ]+04 08[ ]+addb? +\$0x8,%al
[ ]*[0-9a-fA-F]+:[ ]+04 00[ ]+addb? +\$0x0,%al
--- /dev/null
+++ gas/testsuite/gas/i386/ilp32/immed64.s
+ .equiv early, 4
+
+_start:
+ callq *early(%rax)
+ callq *late(%rax)
+ callq *xtrn(%rax)
+ callq *early(%eax)
+ callq *late(%eax)
+ callq *xtrn(%eax)
+ movb $early, %al
+ movb $late, %al
+ movb $xtrn, %al
+ movw $early, %ax
+ movw $late, %ax
+ movw $xtrn, %ax
+ movl $early, %eax
+ movl $late, %eax
+ movl $xtrn, %eax
+ movabsq $early, %rax
+ movabsq $late, %rax
+ addb $early, %al
+ addb $late, %al
+ addb $xtrn, %al
+ addw $early, %ax
+ addw $late, %ax
+ addw $xtrn, %ax
+ addl $early, %eax
+ addl $late, %eax
+ addl $xtrn, %eax
+ addq $early, %rax
+ addq $late, %rax
+ addq $xtrn, %rax
+ shlb $early, %al
+ shlb $late, %al
+ shlb $xtrn, %al
+ shlw $early, %ax
+ shlw $late, %ax
+ shlw $xtrn, %ax
+ shll $early, %eax
+ shll $late, %eax
+ shll $xtrn, %eax
+ shlq $early, %rax
+ shlq $late, %rax
+ shlq $xtrn, %rax
+ inb $early, %al
+ inb $late, %al
+ inb $xtrn, %al
+ inw $early, %ax
+ inw $late, %ax
+ inw $xtrn, %ax
+ inl $early, %eax
+ inl $late, %eax
+ inl $xtrn, %eax
+
+ .equiv late, 8
+
+ movabs $0x80000001, %rax
+ movabsq $0x80000001, %rax
.*: Assembler messages:
.*:3: Error: .*
.*:4: Error: .*
+.*:5: Error: .*
+.*:6: Error: .*
+.*:10: Error: .*
GAS LISTING .*
[ ]*1[ ]+\.text
[ ]*2[ ]+\# All the following should be illegal for x32
[ ]*3[ ]+\?\?\?\? 48A10000 movabs xxx,%rax
-\*\*\*\* Error:'movabs' isn't supported in x[ ]*32[ ]+mode
[ ]*3[ ]+00000000
[ ]*3[ ]+0000
[ ]*4[ ]+\?\?\?\? 48A10000 movabs foo,%rax
-\*\*\*\* Error:'movabs' isn't supported in x[ ]*32[ ]+mode
[ ]*4[ ]+00000000
[ ]*4[ ]+0000
-[ ]*5[ ]+
-[ ]*6[ ]+\.data
-[ ]*7[ ]+xxx:
-[ ]*8[ ]+\?\?\?\? 00 \.byte 0
+[ ]*5[ ]+\?\?\?\? 48A10000 movabsq xxx,%rax
+[ ]*5[ ]+00000000
+[ ]*5[ ]+0000
+[ ]*6[ ]+\?\?\?\? 48A10000 movabsq foo,%rax
+[ ]*6[ ]+00000000
+[ ]*6[ ]+0000
+[ ]*7[ ]+
+[ ]*8[ ]+\.data
+[ ]*9[ ]+xxx:
+[ ]*10[ ]+\?\?\?\? 00000000 \.quad foo
+\*\*\*\* Error:cannot represent relocation type BFD_RELOC_[ ]*64[ ]+in x32 mode
+[ ]*10[ ]+00000000
# All the following should be illegal for x32
movabs xxx,%rax
movabs foo,%rax
+ movabsq xxx,%rax
+ movabsq foo,%rax
.data
xxx:
- .byte 0
+ .quad foo
-#source: ../reloc64.s
#objdump: -Drw
#name: x86-64 (ILP32) relocs
Disassembly of section \.text:
#...
-.*[ ]+R_X86_64_64[ ]+xtrn
.*[ ]+R_X86_64_32S[ ]+xtrn
.*[ ]+R_X86_64_32[ ]+xtrn
.*[ ]+R_X86_64_16[ ]+xtrn
.*[ ]+R_X86_64_8[ ]+xtrn
.*[ ]+R_X86_64_32S[ ]+xtrn
.*[ ]+R_X86_64_32[ ]+xtrn
-.*[ ]+R_X86_64_PC64[ ]+xtrn\+0x0*2
.*[ ]+R_X86_64_PC32[ ]+xtrn\+0x0*2
.*[ ]+R_X86_64_PC16[ ]+xtrn\+0x0*2
.*[ ]+R_X86_64_PC8[ ]+xtrn\+0x0*1
.*[ ]+R_X86_64_PC32[ ]+xtrn\+0xf+c
.*[ ]+R_X86_64_PC32[ ]+xtrn\+0xf+c
.*[ ]+R_X86_64_PC8[ ]+xtrn\+0xf+f
-.*[ ]+R_X86_64_GOT64[ ]+xtrn
.*[ ]+R_X86_64_GOT32[ ]+xtrn
.*[ ]+R_X86_64_GOT32[ ]+xtrn
-.*[ ]+R_X86_64_GOTOFF64[ ]+xtrn
.*[ ]+R_X86_64_GOTPCREL[ ]+xtrn
.*[ ]+R_X86_64_GOTPCREL[ ]+xtrn
.*[ ]+R_X86_64_GOTPCREL[ ]+xtrn\+0xf+c
.*[ ]+R_X86_64_TLSLD[ ]+xtrn
.*[ ]+R_X86_64_TLSLD[ ]+xtrn
.*[ ]+R_X86_64_TLSLD[ ]+xtrn\+0xf+c
-.*[ ]+R_X86_64_DTPOFF64[ ]+xtrn
.*[ ]+R_X86_64_DTPOFF32[ ]+xtrn
.*[ ]+R_X86_64_DTPOFF32[ ]+xtrn
-.*[ ]+R_X86_64_TPOFF64[ ]+xtrn
.*[ ]+R_X86_64_TPOFF32[ ]+xtrn
.*[ ]+R_X86_64_TPOFF32[ ]+xtrn
.*[ ]+R_X86_64_TPOFF32[ ]+xtrn
Disassembly of section \.data:
#...
-.*[ ]+R_X86_64_64[ ]+xtrn
-.*[ ]+R_X86_64_PC64[ ]+xtrn
-.*[ ]+R_X86_64_GOT64[ ]+xtrn
-.*[ ]+R_X86_64_GOTOFF64[ ]+xtrn
-.*[ ]+R_X86_64_GOTPCREL64[ ]+xtrn
-.*[ ]+R_X86_64_DTPOFF64[ ]+xtrn
-.*[ ]+R_X86_64_TPOFF64[ ]+xtrn
.*[ ]+R_X86_64_32[ ]+xtrn
.*[ ]+R_X86_64_PC32[ ]+xtrn
.*[ ]+R_X86_64_GOT32[ ]+xtrn
--- /dev/null
+++ gas/testsuite/gas/i386/ilp32/reloc64.s
+ .macro bad args:vararg
+ .ifdef _bad_
+ \args
+ .endif
+ .endm
+
+ .macro ill args:vararg
+ # This is used to mark entries that aren't handled consistently,
+ # and thus shouldn't currently be checked for.
+ # \args
+ .endm
+
+ .text
+_start:
+ add $xtrn, %rax
+ mov $xtrn, %eax
+ mov $xtrn, %ax
+ mov $xtrn, %al
+ mov xtrn(%rbx), %eax
+ mov xtrn(%ebx), %eax
+
+ add $(xtrn - .), %rax
+ill mov $(xtrn - .), %eax
+ mov $(xtrn - .), %ax
+ mov $(xtrn - .), %al
+ mov xtrn(%rip), %eax
+ mov xtrn(%eip), %eax
+ call xtrn
+ jrcxz xtrn
+
+ add $xtrn@got, %rax
+bad mov $xtrn@got, %eax
+bad mov $xtrn@got, %ax
+bad mov $xtrn@got, %al
+ mov xtrn@got(%rbx), %eax
+bad mov xtrn@got(%ebx), %eax
+bad call xtrn@got
+
+bad add $xtrn@gotoff, %rax
+bad mov $xtrn@gotoff, %eax
+bad mov $xtrn@gotoff, %ax
+bad mov $xtrn@gotoff, %al
+bad mov xtrn@gotoff(%rbx), %eax
+bad mov xtrn@gotoff(%ebx), %eax
+bad call xtrn@gotoff
+
+bad movabs $xtrn@gotpcrel, %rax
+ add $xtrn@gotpcrel, %rax
+bad mov $xtrn@gotpcrel, %eax
+bad mov $xtrn@gotpcrel, %ax
+bad mov $xtrn@gotpcrel, %al
+ mov xtrn@gotpcrel(%rbx), %eax
+bad mov xtrn@gotpcrel(%ebx), %eax
+ call xtrn@gotpcrel
+
+ill movabs $_GLOBAL_OFFSET_TABLE_, %rax
+ add $_GLOBAL_OFFSET_TABLE_, %rax
+ill add $_GLOBAL_OFFSET_TABLE_, %eax
+ill add $_GLOBAL_OFFSET_TABLE_, %ax
+ill add $_GLOBAL_OFFSET_TABLE_, %al
+ lea _GLOBAL_OFFSET_TABLE_(%rip), %rax
+ lea _GLOBAL_OFFSET_TABLE_(%eip), %rax
+ill movabs $(_GLOBAL_OFFSET_TABLE_ - .), %rax
+ add $(_GLOBAL_OFFSET_TABLE_ - .), %rax
+ill add $(_GLOBAL_OFFSET_TABLE_ - .), %eax
+ill add $(_GLOBAL_OFFSET_TABLE_ - .), %ax
+ill add $(_GLOBAL_OFFSET_TABLE_ - .), %al
+
+bad movabs $xtrn@plt, %rax
+ add $xtrn@plt, %rax
+bad mov $xtrn@plt, %eax
+bad mov $xtrn@plt, %ax
+bad mov $xtrn@plt, %al
+ mov xtrn@plt(%rbx), %eax
+bad mov xtrn@plt(%ebx), %eax
+ call xtrn@plt
+bad jrcxz xtrn@plt
+
+bad movabs $xtrn@tlsgd, %rax
+ add $xtrn@tlsgd, %rax
+bad mov $xtrn@tlsgd, %eax
+bad mov $xtrn@tlsgd, %ax
+bad mov $xtrn@tlsgd, %al
+ mov xtrn@tlsgd(%rbx), %eax
+bad mov xtrn@tlsgd(%ebx), %eax
+ call xtrn@tlsgd
+
+bad movabs $xtrn@gottpoff, %rax
+ add $xtrn@gottpoff, %rax
+bad mov $xtrn@gottpoff, %eax
+bad mov $xtrn@gottpoff, %ax
+bad mov $xtrn@gottpoff, %al
+ mov xtrn@gottpoff(%rbx), %eax
+bad mov xtrn@gottpoff(%ebx), %eax
+ call xtrn@gottpoff
+
+bad movabs $xtrn@tlsld, %rax
+ add $xtrn@tlsld, %rax
+bad mov $xtrn@tlsld, %eax
+bad mov $xtrn@tlsld, %ax
+bad mov $xtrn@tlsld, %al
+ mov xtrn@tlsld(%rbx), %eax
+bad mov xtrn@tlsld(%ebx), %eax
+ call xtrn@tlsld
+
+ add $xtrn@dtpoff, %rax
+bad mov $xtrn@dtpoff, %eax
+bad mov $xtrn@dtpoff, %ax
+bad mov $xtrn@dtpoff, %al
+ mov xtrn@dtpoff(%rbx), %eax
+bad mov xtrn@dtpoff(%ebx), %eax
+bad call xtrn@dtpoff
+
+ add $xtrn@tpoff, %rax
+bad mov $xtrn@tpoff, %eax
+bad mov $xtrn@tpoff, %ax
+bad mov $xtrn@tpoff, %al
+ mov xtrn@tpoff(%rbx), %eax
+bad mov xtrn@tpoff(%ebx), %eax
+bad call xtrn@tpoff
+
+ .data
+ .long xtrn
+ .long xtrn - .
+ .long xtrn@got
+bad .long xtrn@gotoff
+ .long xtrn@gotpcrel
+ .long _GLOBAL_OFFSET_TABLE_
+ .long _GLOBAL_OFFSET_TABLE_ - .
+ .long xtrn@plt
+ .long xtrn@tlsgd
+ .long xtrn@gottpoff
+ .long xtrn@tlsld
+ .long xtrn@dtpoff
+ .long xtrn@tpoff
+
+ .slong xtrn
+ .slong xtrn - .
+ .slong xtrn@got
+bad .slong xtrn@gotoff
+ .slong xtrn@gotpcrel
+ .slong _GLOBAL_OFFSET_TABLE_
+ .slong _GLOBAL_OFFSET_TABLE_ - .
+ .slong xtrn@plt
+ .slong xtrn@tlsgd
+ .slong xtrn@gottpoff
+ .slong xtrn@tlsld
+ .slong xtrn@dtpoff
+ .slong xtrn@tpoff
+
+ .word xtrn
+ .word xtrn - .
+bad .word xtrn@got
+bad .word xtrn@gotoff
+bad .word xtrn@gotpcrel
+ill .word _GLOBAL_OFFSET_TABLE_
+ill .word _GLOBAL_OFFSET_TABLE_ - .
+bad .word xtrn@plt
+bad .word xtrn@tlsgd
+bad .word xtrn@gottpoff
+bad .word xtrn@tlsld
+bad .word xtrn@dtpoff
+bad .word xtrn@tpoff
+
+ .byte xtrn
+ .byte xtrn - .
+bad .byte xtrn@got
+bad .byte xtrn@gotoff
+bad .byte xtrn@gotpcrel
+ill .byte _GLOBAL_OFFSET_TABLE_
+ill .byte _GLOBAL_OFFSET_TABLE_ - .
+bad .byte xtrn@plt
+bad .byte xtrn@tlsgd
+bad .byte xtrn@gottpoff
+bad .byte xtrn@tlsld
+bad .byte xtrn@dtpoff
+bad .byte xtrn@tpoff
+
+ .text
+ mov xtrn@tpoff (%rbx), %eax
-#source: ../x86-64-pcrel.s
#objdump: -drw
#name: x86-64 (ILP32) pcrel
[ ]*[0-9a-f]+:[ ]+66 b8 00 00[ ]+movw?[ ]+\$(0x)?0,%ax[ ]*[0-9a-f]+:[ ]+R_X86_64_PC16[ ]+xtrn\+(0x)?2
[ ]*[0-9a-f]+:[ ]+b8( 00){4}[ ]+movl?[ ]+\$(0x)?0,%eax[ ]*[0-9a-f]+:[ ]+R_X86_64_PC32[ ]+xtrn\+(0x)?1
[ ]*[0-9a-f]+:[ ]+48 c7 c0( 00){4}[ ]+movq?[ ]+\$(0x)?0,%rax[ ]*[0-9a-f]+:[ ]+R_X86_64_PC32[ ]+xtrn\+(0x)?3
-[ ]*[0-9a-f]+:[ ]+48 b8( 00){8}[ ]+mov(abs)?q?[ ]+\$(0x)?0,%rax[ ]*[0-9a-f]+:[ ]+R_X86_64_PC64[ ]+xtrn\+(0x)?2
[ ]*[0-9a-f]+:[ ]+b0 00[ ]+movb?[ ]+\$(0x)?0,%al[ ]*[0-9a-f]+:[ ]+R_X86_64_8[ ]+xtrn
[ ]*[0-9a-f]+:[ ]+66 b8 00 00[ ]+movw?[ ]+\$(0x)?0,%ax[ ]*[0-9a-f]+:[ ]+R_X86_64_16[ ]+xtrn
[ ]*[0-9a-f]+:[ ]+b8( 00){4}[ ]+movl?[ ]+\$(0x)?0,%eax[ ]*[0-9a-f]+:[ ]+R_X86_64_32[ ]+xtrn
[ ]*[0-9a-f]+:[ ]+48 c7 c0( 00){4}[ ]+movq?[ ]+\$(0x)?0,%rax[ ]*[0-9a-f]+:[ ]+R_X86_64_32S[ ]+xtrn
-[ ]*[0-9a-f]+:[ ]+48 b8( 00){8}[ ]+mov(abs)?q?[ ]+\$(0x)?0,%rax[ ]*[0-9a-f]+:[ ]+R_X86_64_64[ ]+xtrn
#pass
--- /dev/null
+++ gas/testsuite/gas/i386/ilp32/x86-64-pcrel.s
+ .text
+_start:
+ movb $(xtrn - .), %al
+ movw $(xtrn - .), %ax
+ movl $(xtrn - .), %eax
+ movq $(xtrn - .), %rax
+
+ movb $xtrn, %al
+ movw $xtrn, %ax
+ movl $xtrn, %eax
+ movq $xtrn, %rax