+2002-10-21 Jakub Jelinek <jakub@redhat.com>
+
+ * config/i386/i386.c (x86_64_sign_extended_value): Add allow_rip
+ argument.  If allow_rip, in the CM_SMALL_PIC model consider SYMBOL_REFs
+ binding locally or coming from the constant pool, and LABEL_REFs, as
+ sign extended.
+ Change all +-1GB limits to +-16MB.
+ (x86_64_general_operand, x86_64_szext_general_operand,
+ x86_64_nonmemory_operand, x86_64_movabs_operand,
+ x86_64_szext_nonmemory_operand, x86_64_immediate_operand,
+ legitimate_address_p, ix86_expand_int_movcc): Update callers.
+ (local_symbolic_operand): Don't allow offsets outside the +-16MB range
+ in the CM_SMALL_PIC model.
+ (legitimate_pic_address_disp_p): Don't check offsets before
+ calling local_symbolic_operand.
+ (legitimize_pic_address): Force offsets outside the +-16MB range into
+ a register.
+ * config/i386/i386.h (EXTRA_CONSTRAINT, CONST_COSTS): Likewise.
+ * config/i386/i386-protos.h (x86_64_sign_extended_value): Update
+ prototype.
+
+ * configure.in: Test for @GOTNTPOFF and @INDNTPOFF on IA-32 too.
+ Add x86-64 test. Set tls_first_minor to 14 on IA-32 and x86-64.
+ * configure: Rebuilt.
+ * config/i386/i386.c (x86_64_sign_extended_value): Don't allow TLS
+ SYMBOL_REFs unless enclosed in UNSPEC. Handle UNSPEC_DTPOFF,
+ UNSPEC_GOTNTPOFF and UNSPEC_NTPOFF.
+ (legitimate_address_p): Allow foo@dtpoff(base) even on TARGET_64BIT
+ -fpic.
+ (ix86_encode_section_info): Don't ever generate TLSGD or TLSLD for
+ non-pic code if TARGET_64BIT.
+ (legitimize_address): Generate 64-bit TLS sequences.
+ (output_pic_addr_const): Support x86-64 TLS operators.
+ (i386_output_dwarf_dtprel): Output 64-bit DTPOFF as .long f@DTPOFF, 0.
+ (print_operand_address): Use %fs instead of %gs on TARGET_64BIT.
+ Don't append (%rip) in 64-bit TLSGD and TLSLD sequences.
+ (output_addr_const_extra): Support x86-64 TLS operators.
+ (maybe_get_pool_constant): Handle TARGET_64BIT -fpic.
+ (ix86_tls_get_addr): Use __tls_get_addr on TARGET_64BIT
+ unconditionally.
+ * config/i386/i386.md (*tls_global_dynamic_gnu): Renamed to...
+ (*tls_global_dynamic_32_gnu): ... this, add !TARGET_64BIT.
+ (*tls_global_dynamic_sun): Renamed to...
+ (*tls_global_dynamic_32_sun): ... this, add !TARGET_64BIT.
+ (tls_global_dynamic): Renamed to...
+ (tls_global_dynamic_32): ... this.
+ (tls_global_dynamic_64, *tls_global_dynamic_64): New.
+ (*tls_local_dynamic_base_gnu): Renamed to...
+ (*tls_local_dynamic_base_32_gnu): ... this, add !TARGET_64BIT.
+ (*tls_local_dynamic_base_sun): Renamed to...
+ (*tls_local_dynamic_base_32_sun): ... this, add !TARGET_64BIT.
+ (tls_local_dynamic_base): Renamed to...
+ (tls_local_dynamic_base_32): ... this.
+ (tls_local_dynamic_base_64, *tls_local_dynamic_base_64): New.
+ (*tls_local_dynamic_once): Renamed to...
+ (*tls_local_dynamic_32_once): ... this.
+
2002-10-21 Ulrich Weigand <uweigand@de.ibm.com>
* libgcc2.c: Inline __udiv_w_sdiv when compiling __udivdi3,
return general_operand (op, mode);
if (nonimmediate_operand (op, mode))
return 1;
- return x86_64_sign_extended_value (op);
+ return x86_64_sign_extended_value (op, 1);
}
/* Return nonzero if OP is general operand representable on x86_64
return general_operand (op, mode);
if (nonimmediate_operand (op, mode))
return 1;
- return x86_64_sign_extended_value (op) || x86_64_zero_extended_value (op);
+ return x86_64_sign_extended_value (op, 1) || x86_64_zero_extended_value (op);
}
/* Return nonzero if OP is nonmemory operand representable on x86_64. */
return nonmemory_operand (op, mode);
if (register_operand (op, mode))
return 1;
- return x86_64_sign_extended_value (op);
+ return x86_64_sign_extended_value (op, 1);
}
/* Return nonzero if OP is nonmemory operand acceptable by movabs patterns. */
{
if (!TARGET_64BIT || !flag_pic)
return nonmemory_operand (op, mode);
- if (register_operand (op, mode) || x86_64_sign_extended_value (op))
+ if (register_operand (op, mode) || x86_64_sign_extended_value (op, 0))
return 1;
if (CONSTANT_P (op) && !symbolic_reference_mentioned_p (op))
return 1;
return nonmemory_operand (op, mode);
if (register_operand (op, mode))
return 1;
- return x86_64_sign_extended_value (op) || x86_64_zero_extended_value (op);
+ return x86_64_sign_extended_value (op, 0) || x86_64_zero_extended_value (op);
}
/* Return nonzero if OP is immediate operand representable on x86_64. */
{
if (!TARGET_64BIT)
return immediate_operand (op, mode);
- return x86_64_sign_extended_value (op);
+ return x86_64_sign_extended_value (op, 0);
}
/* Return nonzero if OP is immediate operand representable on x86_64. */
if (GET_CODE (op) == CONST
&& GET_CODE (XEXP (op, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
+ && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
+ && (ix86_cmodel != CM_SMALL_PIC
+ || (INTVAL (XEXP (XEXP (op, 0), 1)) >= -16*1024*1024
+ && INTVAL (XEXP (XEXP (op, 0), 1)) < 16*1024*1024)))
op = XEXP (XEXP (op, 0), 0);
if (GET_CODE (op) != SYMBOL_REF)
\f
/* Return 1 if VALUE can be stored in the sign extended immediate field. */
int
-x86_64_sign_extended_value (value)
+x86_64_sign_extended_value (value, allow_rip)
rtx value;
+ int allow_rip;
{
switch (GET_CODE (value))
{
}
break;
- /* For certain code models, the symbolic references are known to fit. */
+ /* For certain code models, the symbolic references are known to fit.
+ In the CM_SMALL_PIC model we know they fit if they are local to the
+ shared library.  Don't count TLS SYMBOL_REFs here; they only fit
+ when wrapped in one of the UNSPECs handled below. */
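+ /* A %rip-relative reference such as  movl foo(%rip), %eax  carries
+ only a sign-extended 32-bit displacement, which is what makes the
+ allow_rip cases below safe in the small PIC model. */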
case SYMBOL_REF:
- return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL;
+ return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
+ || (allow_rip
+ && ix86_cmodel == CM_SMALL_PIC
+ && (CONSTANT_POOL_ADDRESS_P (value)
+ || SYMBOL_REF_FLAG (value))
+ && ! tls_symbolic_operand (value, GET_MODE (value))));
/* For certain code models, the code is near as well. */
case LABEL_REF:
- return ix86_cmodel != CM_LARGE && ix86_cmodel != CM_SMALL_PIC;
+ return ix86_cmodel != CM_LARGE
+ && (allow_rip || ix86_cmodel != CM_SMALL_PIC);
/* We also may accept the offsetted memory references in certain special
cases. */
case CONST:
- if (GET_CODE (XEXP (value, 0)) == UNSPEC
- && XINT (XEXP (value, 0), 1) == UNSPEC_GOTPCREL)
- return 1;
- else if (GET_CODE (XEXP (value, 0)) == PLUS)
+ if (GET_CODE (XEXP (value, 0)) == UNSPEC)
+ switch (XINT (XEXP (value, 0), 1))
+ {
+ case UNSPEC_GOTPCREL:
+ case UNSPEC_DTPOFF:
+ case UNSPEC_GOTNTPOFF:
+ case UNSPEC_NTPOFF:
+ return 1;
+ default:
+ break;
+ }
+ if (GET_CODE (XEXP (value, 0)) == PLUS)
{
rtx op1 = XEXP (XEXP (value, 0), 0);
rtx op2 = XEXP (XEXP (value, 0), 1);
switch (GET_CODE (op1))
{
case SYMBOL_REF:
- /* For CM_SMALL assume that latest object is 1MB before
+ /* For CM_SMALL assume that latest object is 16MB before
end of 31bits boundary. We may also accept pretty
large negative constants knowing that all objects are
in the positive half of address space. */
if (ix86_cmodel == CM_SMALL
- && offset < 1024*1024*1024
+ && offset < 16*1024*1024
&& trunc_int_for_mode (offset, SImode) == offset)
return 1;
/* For CM_KERNEL we know that all object resist in the
&& offset > 0
&& trunc_int_for_mode (offset, SImode) == offset)
return 1;
+ /* For CM_SMALL_PIC, we can make similar assumptions
+ as for CM_SMALL model, if we know the symbol is local
+ to the shared library. Disallow any TLS symbols,
+ since they should always be enclosed in an UNSPEC. */
+ if (ix86_cmodel == CM_SMALL_PIC
+ && allow_rip
+ && (CONSTANT_POOL_ADDRESS_P (op1)
+ || SYMBOL_REF_FLAG (op1))
+ && ! tls_symbolic_operand (op1, GET_MODE (op1))
+ && offset < 16*1024*1024
+ && offset >= -16*1024*1024
+ && trunc_int_for_mode (offset, SImode) == offset)
+ return 1;
break;
case LABEL_REF:
/* These conditions are similar to SYMBOL_REF ones, just the
constraints for code models differ. */
- if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
- && offset < 1024*1024*1024
+ if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
+ || (ix86_cmodel == CM_SMALL_PIC && allow_rip
+ && offset >= -16*1024*1024))
+ && offset < 16*1024*1024
&& trunc_int_for_mode (offset, SImode) == offset)
return 1;
if (ix86_cmodel == CM_KERNEL
&& trunc_int_for_mode (offset, SImode) == offset)
return 1;
break;
+ case UNSPEC:
+ switch (XINT (op1, 1))
+ {
+ case UNSPEC_DTPOFF:
+ case UNSPEC_NTPOFF:
+ if (offset > 0
+ && trunc_int_for_mode (offset, SImode) == offset)
+ return 1;
+ }
+ break;
default:
return 0;
}
/* In 64bit mode we can allow direct addresses of symbols and labels
when they are not dynamic symbols. */
- if (TARGET_64BIT)
- {
- rtx x = disp;
- if (GET_CODE (disp) == CONST)
- x = XEXP (disp, 0);
- /* ??? Handle PIC code models */
- if (GET_CODE (x) == PLUS
- && (GET_CODE (XEXP (x, 1)) == CONST_INT
- && ix86_cmodel == CM_SMALL_PIC
- && INTVAL (XEXP (x, 1)) < 1024*1024*1024
- && INTVAL (XEXP (x, 1)) > -1024*1024*1024))
- x = XEXP (x, 0);
- if (local_symbolic_operand (x, Pmode))
- return 1;
- }
+ if (TARGET_64BIT && local_symbolic_operand (disp, Pmode))
+ return 1;
if (GET_CODE (disp) != CONST)
return 0;
disp = XEXP (disp, 0);
if (TARGET_64BIT)
{
- if (!x86_64_sign_extended_value (disp))
+ if (!x86_64_sign_extended_value (disp, !(index || base)))
{
reason = "displacement is out of range";
goto report_error;
is_legitimate_pic:
if (TARGET_64BIT && (index || base))
{
- reason = "non-constant pic memory reference";
- goto report_error;
+ /* foo@dtpoff(%rX) is ok. */
+ if (GET_CODE (disp) != CONST
+ || GET_CODE (XEXP (disp, 0)) != PLUS
+ || GET_CODE (XEXP (XEXP (disp, 0), 0)) != UNSPEC
+ || GET_CODE (XEXP (XEXP (disp, 0), 1)) != CONST_INT
+ || (XINT (XEXP (XEXP (disp, 0), 0), 1) != UNSPEC_DTPOFF
+ && XINT (XEXP (XEXP (disp, 0), 0), 1) != UNSPEC_NTPOFF))
+ {
+ reason = "non-constant pic memory reference";
+ goto report_error;
+ }
}
- if (! legitimate_pic_address_disp_p (disp))
+ else if (! legitimate_pic_address_disp_p (disp))
{
reason = "displacement is an invalid pic construct";
goto report_error;
}
else
{
- /* ??? We need to limit offsets here. */
+ if (INTVAL (op1) < -16*1024*1024
+ || INTVAL (op1) >= 16*1024*1024)
+ new = gen_rtx_PLUS (Pmode, op0, force_reg (Pmode, op1));
}
}
else
size_t len;
enum tls_model kind = decl_tls_model (decl);
+ if (TARGET_64BIT && ! flag_pic)
+ {
+ /* x86-64 doesn't allow non-pic code for shared libraries,
+ so don't generate GD/LD TLS models for non-pic code. */
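+ /* E.g. what would have been a global dynamic __tls_get_addr call
+ is emitted as an initial exec  x@gottpoff(%rip)  access instead. */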
+ switch (kind)
+ {
+ case TLS_MODEL_GLOBAL_DYNAMIC:
+ kind = TLS_MODEL_INITIAL_EXEC; break;
+ case TLS_MODEL_LOCAL_DYNAMIC:
+ kind = TLS_MODEL_LOCAL_EXEC; break;
+ default:
+ break;
+ }
+ }
+
symbol_str = XSTR (symbol, 0);
if (symbol_str[0] == '%')
if (log)
{
rtx dest, base, off, pic;
+ int type;
switch (log)
{
case TLS_MODEL_GLOBAL_DYNAMIC:
dest = gen_reg_rtx (Pmode);
- emit_insn (gen_tls_global_dynamic (dest, x));
+ if (TARGET_64BIT)
+ {
+ rtx rax = gen_rtx_REG (Pmode, 0), insns;
+
+ start_sequence ();
+ emit_call_insn (gen_tls_global_dynamic_64 (rax, x));
+ insns = get_insns ();
+ end_sequence ();
+
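+ /* Wrap the call in a libcall block with the TLS symbol as its
+ REG_EQUAL value, so equivalent GD accesses can be CSEd. */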
+ emit_libcall_block (insns, dest, rax, x);
+ }
+ else
+ emit_insn (gen_tls_global_dynamic_32 (dest, x));
break;
case TLS_MODEL_LOCAL_DYNAMIC:
base = gen_reg_rtx (Pmode);
- emit_insn (gen_tls_local_dynamic_base (base));
+ if (TARGET_64BIT)
+ {
+ rtx rax = gen_rtx_REG (Pmode, 0), insns, note;
+
+ start_sequence ();
+ emit_call_insn (gen_tls_local_dynamic_base_64 (rax));
+ insns = get_insns ();
+ end_sequence ();
+
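+ /* Give the libcall block a REG_EQUAL value built from __tls_get_addr
+ and a zero argument, so repeated local dynamic base computations
+ can be CSEd. */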
+ note = gen_rtx_EXPR_LIST (VOIDmode, const0_rtx, NULL);
+ note = gen_rtx_EXPR_LIST (VOIDmode, ix86_tls_get_addr (), note);
+ emit_libcall_block (insns, base, rax, note);
+ }
+ else
+ emit_insn (gen_tls_local_dynamic_base_32 (base));
off = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPOFF);
off = gen_rtx_CONST (Pmode, off);
return gen_rtx_PLUS (Pmode, base, off);
case TLS_MODEL_INITIAL_EXEC:
- if (flag_pic)
+ if (TARGET_64BIT)
+ {
+ pic = NULL;
+ type = UNSPEC_GOTNTPOFF;
+ }
+ else if (flag_pic)
{
if (reload_in_progress)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
pic = pic_offset_table_rtx;
+ type = TARGET_GNU_TLS ? UNSPEC_GOTNTPOFF : UNSPEC_GOTTPOFF;
}
else if (!TARGET_GNU_TLS)
{
pic = gen_reg_rtx (Pmode);
emit_insn (gen_set_got (pic));
+ type = UNSPEC_GOTTPOFF;
}
else
- pic = NULL;
+ {
+ pic = NULL;
+ type = UNSPEC_INDNTPOFF;
+ }
base = get_thread_pointer ();
- off = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x),
- !TARGET_GNU_TLS
- ? UNSPEC_GOTTPOFF
- : flag_pic ? UNSPEC_GOTNTPOFF
- : UNSPEC_INDNTPOFF);
+ off = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), type);
off = gen_rtx_CONST (Pmode, off);
- if (flag_pic || !TARGET_GNU_TLS)
+ if (pic)
off = gen_rtx_PLUS (Pmode, pic, off);
off = gen_rtx_MEM (Pmode, off);
RTX_UNCHANGING_P (off) = 1;
set_mem_alias_set (off, ix86_GOT_alias_set ());
dest = gen_reg_rtx (Pmode);
- if (TARGET_GNU_TLS)
+ if (TARGET_64BIT || TARGET_GNU_TLS)
{
emit_move_insn (dest, off);
return gen_rtx_PLUS (Pmode, base, dest);
base = get_thread_pointer ();
off = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x),
- TARGET_GNU_TLS ? UNSPEC_NTPOFF : UNSPEC_TPOFF);
+ (TARGET_64BIT || TARGET_GNU_TLS)
+ ? UNSPEC_NTPOFF : UNSPEC_TPOFF);
off = gen_rtx_CONST (Pmode, off);
- if (TARGET_GNU_TLS)
+ if (TARGET_64BIT || TARGET_GNU_TLS)
return gen_rtx_PLUS (Pmode, base, off);
else
{
fputs ("@TPOFF", file);
break;
case UNSPEC_NTPOFF:
- fputs ("@NTPOFF", file);
+ if (TARGET_64BIT)
+ fputs ("@TPOFF", file);
+ else
+ fputs ("@NTPOFF", file);
break;
case UNSPEC_DTPOFF:
fputs ("@DTPOFF", file);
break;
case UNSPEC_GOTNTPOFF:
- fputs ("@GOTNTPOFF", file);
+ if (TARGET_64BIT)
+ fputs ("@GOTTPOFF(%rip)", file);
+ else
+ fputs ("@GOTNTPOFF", file);
break;
case UNSPEC_INDNTPOFF:
fputs ("@INDNTPOFF", file);
int size;
rtx x;
{
+ fputs (ASM_LONG, file);
+ output_addr_const (file, x);
+ fputs ("@DTPOFF", file);
switch (size)
{
case 4:
- fputs (ASM_LONG, file);
break;
case 8:
-#ifdef ASM_QUAD
- fputs (ASM_QUAD, file);
+ fputs (", 0", file);
break;
-#endif
default:
abort ();
}
-
- output_addr_const (file, x);
- fputs ("@DTPOFF", file);
}
/* In the name of slightly smaller debug output, and to cater to
fputs ("DWORD PTR ", file);
if (ASSEMBLER_DIALECT == ASM_ATT || USER_LABEL_PREFIX[0] == 0)
putc ('%', file);
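+ /* The TLS thread pointer is addressed through %fs in 64-bit mode
+ and through %gs in 32-bit mode. */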
- fputs ("gs:0", file);
+ if (TARGET_64BIT)
+ fputs ("fs:0", file);
+ else
+ fputs ("gs:0", file);
return;
}
/* Use one byte shorter RIP relative addressing for 64bit mode. */
if (TARGET_64BIT
- && (GET_CODE (addr) == SYMBOL_REF
+ && ((GET_CODE (addr) == SYMBOL_REF
+ && ! tls_symbolic_operand (addr, GET_MODE (addr)))
|| GET_CODE (addr) == LABEL_REF
|| (GET_CODE (addr) == CONST
&& GET_CODE (XEXP (addr, 0)) == PLUS
break;
case UNSPEC_NTPOFF:
output_addr_const (file, op);
- fputs ("@NTPOFF", file);
+ if (TARGET_64BIT)
+ fputs ("@TPOFF", file);
+ else
+ fputs ("@NTPOFF", file);
break;
case UNSPEC_DTPOFF:
output_addr_const (file, op);
break;
case UNSPEC_GOTNTPOFF:
output_addr_const (file, op);
- fputs ("@GOTNTPOFF", file);
+ if (TARGET_64BIT)
+ fputs ("@GOTTPOFF(%rip)", file);
+ else
+ fputs ("@GOTNTPOFF", file);
break;
case UNSPEC_INDNTPOFF:
output_addr_const (file, op);
{
x = XEXP (x, 0);
- if (flag_pic)
+ if (flag_pic && ! TARGET_64BIT)
{
if (GET_CODE (x) != PLUS)
return NULL_RTX;
if ((diff == 1 || diff == 2 || diff == 4 || diff == 8
|| diff == 3 || diff == 5 || diff == 9)
- && (mode != DImode || x86_64_sign_extended_value (GEN_INT (cf))))
+ && (mode != DImode || x86_64_sign_extended_value (GEN_INT (cf), 0)))
{
/*
* xorl dest,dest
if (!ix86_tls_symbol)
{
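+ /* The x86-64 psABI defines a single __tls_get_addr entry point
+ (argument in %rdi); ___tls_get_addr is the IA-32 GNU variant. */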
- ix86_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, (TARGET_GNU_TLS
- ? "___tls_get_addr"
- : "__tls_get_addr"));
+ ix86_tls_symbol = gen_rtx_SYMBOL_REF (Pmode,
+ (TARGET_GNU_TLS && !TARGET_64BIT)
+ ? "___tls_get_addr"
+ : "__tls_get_addr");
}
return ix86_tls_symbol;
;; Note that these code sequences must appear exactly as shown
;; in order to allow linker relaxation.
-(define_insn "*tls_global_dynamic_gnu"
+(define_insn "*tls_global_dynamic_32_gnu"
[(set (match_operand:SI 0 "register_operand" "=a")
(unspec:SI [(match_operand:SI 1 "register_operand" "b")
(match_operand:SI 2 "tls_symbolic_operand" "")
(clobber (match_scratch:SI 4 "=d"))
(clobber (match_scratch:SI 5 "=c"))
(clobber (reg:CC 17))]
- "TARGET_GNU_TLS"
+ "!TARGET_64BIT && TARGET_GNU_TLS"
"lea{l}\t{%a2@TLSGD(,%1,1), %0|%0, %a2@TLSGD[%1*1]}\;call\t%P3"
[(set_attr "type" "multi")
(set_attr "length" "12")])
-(define_insn "*tls_global_dynamic_sun"
+(define_insn "*tls_global_dynamic_32_sun"
[(set (match_operand:SI 0 "register_operand" "=a")
(unspec:SI [(match_operand:SI 1 "register_operand" "b")
(match_operand:SI 2 "tls_symbolic_operand" "")
(clobber (match_scratch:SI 4 "=d"))
(clobber (match_scratch:SI 5 "=c"))
(clobber (reg:CC 17))]
- "TARGET_SUN_TLS"
+ "!TARGET_64BIT && TARGET_SUN_TLS"
"lea{l}\t{%a2@DTLNDX(%1), %4|%4, %a2@DTLNDX[%1]}
push{l}\t%4\;call\t%a2@TLSPLT\;pop{l}\t%4\;nop"
[(set_attr "type" "multi")
(set_attr "length" "14")])
-(define_expand "tls_global_dynamic"
+(define_expand "tls_global_dynamic_32"
[(parallel [(set (match_operand:SI 0 "register_operand" "")
(unspec:SI
[(match_dup 2)
operands[3] = ix86_tls_get_addr ();
})
-(define_insn "*tls_local_dynamic_base_gnu"
+(define_insn "*tls_global_dynamic_64"
+ [(set (match_operand:DI 0 "register_operand" "=a")
+ (call (mem:QI (match_operand:DI 2 "call_insn_operand" ""))
+ (match_operand:DI 3 "" "")))
+ (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ UNSPEC_TLS_GD)]
+ "TARGET_64BIT"
+ ".byte\t0x66\;lea{q}\t{%a1@TLSGD(%%rip), %%rdi|%%rdi, %a1@TLSGD[%%rip]}\;.word\t0x6666\;rex64\;call\t%P2"
+ [(set_attr "type" "multi")
+ (set_attr "length" "16")])
+
+(define_expand "tls_global_dynamic_64"
+ [(parallel [(set (match_operand:DI 0 "register_operand" "")
+ (call (mem:QI (match_dup 2)) (const_int 0)))
+ (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ UNSPEC_TLS_GD)])]
+ ""
+{
+ operands[2] = ix86_tls_get_addr ();
+})
+
+(define_insn "*tls_local_dynamic_base_32_gnu"
[(set (match_operand:SI 0 "register_operand" "=a")
(unspec:SI [(match_operand:SI 1 "register_operand" "b")
(match_operand:SI 2 "call_insn_operand" "")]
(clobber (match_scratch:SI 3 "=d"))
(clobber (match_scratch:SI 4 "=c"))
(clobber (reg:CC 17))]
- "TARGET_GNU_TLS"
+ "!TARGET_64BIT && TARGET_GNU_TLS"
"lea{l}\t{%&@TLSLDM(%1), %0|%0, %&@TLSLDM[%1]}\;call\t%P2"
[(set_attr "type" "multi")
(set_attr "length" "11")])
-(define_insn "*tls_local_dynamic_base_sun"
+(define_insn "*tls_local_dynamic_base_32_sun"
[(set (match_operand:SI 0 "register_operand" "=a")
(unspec:SI [(match_operand:SI 1 "register_operand" "b")
(match_operand:SI 2 "call_insn_operand" "")]
(clobber (match_scratch:SI 3 "=d"))
(clobber (match_scratch:SI 4 "=c"))
(clobber (reg:CC 17))]
- "TARGET_SUN_TLS"
+ "!TARGET_64BIT && TARGET_SUN_TLS"
"lea{l}\t{%&@TMDNX(%1), %3|%3, %&@TMDNX[%1]}
push{l}\t%3\;call\t%&@TLSPLT\;pop{l}\t%3"
[(set_attr "type" "multi")
(set_attr "length" "13")])
-(define_expand "tls_local_dynamic_base"
+(define_expand "tls_local_dynamic_base_32"
[(parallel [(set (match_operand:SI 0 "register_operand" "")
(unspec:SI [(match_dup 1) (match_dup 2)]
UNSPEC_TLS_LD_BASE))
operands[2] = ix86_tls_get_addr ();
})
+(define_insn "*tls_local_dynamic_base_64"
+ [(set (match_operand:DI 0 "register_operand" "=a")
+ (call (mem:QI (match_operand:DI 1 "call_insn_operand" ""))
+ (match_operand:DI 2 "" "")))
+ (unspec:DI [(const_int 0)] UNSPEC_TLS_LD_BASE)]
+ "TARGET_64BIT"
+ "lea{q}\t{%&@TLSLD(%%rip), %%rdi|%%rdi, %&@TLSLD[%%rip]}\;call\t%P1"
+ [(set_attr "type" "multi")
+ (set_attr "length" "12")])
+
+(define_expand "tls_local_dynamic_base_64"
+ [(parallel [(set (match_operand:DI 0 "register_operand" "")
+ (call (mem:QI (match_dup 1)) (const_int 0)))
+ (unspec:DI [(const_int 0)] UNSPEC_TLS_LD_BASE)])]
+ ""
+{
+ operands[1] = ix86_tls_get_addr ();
+})
+
;; Local dynamic of a single variable is a lose. Show combine how
;; to convert that back to global dynamic.
-(define_insn_and_split "*tls_local_dynamic_once"
+(define_insn_and_split "*tls_local_dynamic_32_once"
[(set (match_operand:SI 0 "register_operand" "=a")
(plus:SI (unspec:SI [(match_operand:SI 1 "register_operand" "b")
(match_operand:SI 2 "call_insn_operand" "")]