int
legitimate_pic_operand_p (rtx x)
{
- if (GET_CODE (x) == SYMBOL_REF
+ if (SYMBOL_REF_P (x)
|| (GET_CODE (x) == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF))
{
gcc_assert (compute_now == (pic_reg != NULL_RTX));
- if (GET_CODE (orig) == SYMBOL_REF
- || GET_CODE (orig) == LABEL_REF)
+ if (SYMBOL_REF_P (orig)
+ || LABEL_REF_P (orig))
{
if (reg == 0)
{
/* References to weak symbols cannot be resolved locally: they
may be overridden by a non-weak definition at link time. */
rtx_insn *insn;
- if ((GET_CODE (orig) == LABEL_REF
- || (GET_CODE (orig) == SYMBOL_REF
+ if ((LABEL_REF_P (orig)
+ || (SYMBOL_REF_P (orig)
&& SYMBOL_REF_LOCAL_P (orig)
&& (SYMBOL_REF_DECL (orig)
? !DECL_WEAK (SYMBOL_REF_DECL (orig)) : 1)
{
*is_readonly = false;
- if (GET_CODE (orig) == LABEL_REF)
+ if (LABEL_REF_P (orig))
{
*is_readonly = true;
return true;
(set (reg r0) (mem (reg r0))).
No extra register is required, and (mem (reg r0)) won't cause the use
of literal pools. */
- if (arm_disable_literal_pool && GET_CODE (x) == SYMBOL_REF
+ if (arm_disable_literal_pool && SYMBOL_REF_P (x)
&& CONSTANT_POOL_ADDRESS_P (x))
return 1;
return 0;
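
For reference, the predicate macros these hunks switch to are the one-line wrappers from gcc/rtl.h, so each replacement is intended as a behavior-preserving cleanup rather than a semantic change:

  #define SYMBOL_REF_P(X) (GET_CODE (X) == SYMBOL_REF)
  #define LABEL_REF_P(X)  (GET_CODE (X) == LABEL_REF)
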
/* This is PC relative data before arm_reorg runs. */
else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
- && GET_CODE (x) == SYMBOL_REF
+ && SYMBOL_REF_P (x)
&& CONSTANT_POOL_ADDRESS_P (x) && !flag_pic
&& !arm_disable_literal_pool)
return 1;
/* This is PC relative data after arm_reorg runs. */
else if ((GET_MODE_SIZE (mode) >= 4 || mode == HFmode)
&& reload_completed
- && (GET_CODE (x) == LABEL_REF
+ && (LABEL_REF_P (x)
|| (GET_CODE (x) == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
else if (GET_MODE_CLASS (mode) != MODE_FLOAT
&& GET_MODE_SIZE (mode) == 4
- && GET_CODE (x) == SYMBOL_REF
+ && SYMBOL_REF_P (x)
&& CONSTANT_POOL_ADDRESS_P (x)
&& !arm_disable_literal_pool
&& ! (flag_pic
x = XEXP (XEXP (x, 0), 0);
}
- if (GET_CODE (x) != SYMBOL_REF)
+ if (!SYMBOL_REF_P (x))
return x;
gcc_assert (SYMBOL_REF_TLS_MODEL (x) != 0);
FOR_EACH_SUBRTX (iter, array, x, ALL)
{
const_rtx x = *iter;
- if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0)
+ if (SYMBOL_REF_P (x) && SYMBOL_REF_TLS_MODEL (x) != 0)
{
/* ARM currently does not provide relocations to encode TLS variables
into AArch32 instructions, only data, so there is no way to
return (CONST_INT_P (x)
|| CONST_DOUBLE_P (x)
|| CONSTANT_ADDRESS_P (x)
- || (TARGET_HAVE_MOVT && GET_CODE (x) == SYMBOL_REF)
+ || (TARGET_HAVE_MOVT && SYMBOL_REF_P (x))
/* On Thumb-1 without MOVT/MOVW and literal pool disabled,
we build the symbol address with upper/lower
relocations. */
\f
#define REG_OR_SUBREG_REG(X) \
(REG_P (X) \
- || (GET_CODE (X) == SUBREG && REG_P (SUBREG_REG (X))))
+ || (SUBREG_P (X) && REG_P (SUBREG_REG (X))))
#define REG_OR_SUBREG_RTX(X) \
(REG_P (X) ? (X) : SUBREG_REG (X))
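
A note on pairing these two macros (a minimal sketch with a hypothetical operand op): REG_OR_SUBREG_RTX applies SUBREG_REG unconditionally whenever X is not a REG, so it is only safe once REG_OR_SUBREG_REG has accepted the shape, which is how the cost code below uses it:

  if (REG_OR_SUBREG_REG (op))
    /* Safe only under the guard: for a non-REG op this expands to
       SUBREG_REG (op), which assumes op really is a SUBREG.  */
    op = REG_OR_SUBREG_RTX (op);
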
/* Memory costs quite a lot for the first word, but subsequent words
load at the equivalent of a single insn each. */
return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
- + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
+ + ((SYMBOL_REF_P (x) && CONSTANT_POOL_ADDRESS_P (x))
? 4 : 0));
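
To make the comment above concrete, assuming UNITS_PER_WORD == 4 as on AArch32: an SImode load costs 10 + 4 * ((4 - 1) / 4) = 10, a DImode load costs 10 + 4 * ((8 - 1) / 4) = 14 (one extra word at 4 each), and a constant-pool SYMBOL_REF adds 4 more in either case.
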
case IF_THEN_ELSE:
return (COSTS_N_INSNS (1)
+ COSTS_N_INSNS (1)
* ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
- + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
+ + ((SYMBOL_REF_P (x) && CONSTANT_POOL_ADDRESS_P (x))
? COSTS_N_INSNS (1) : 0));
case IF_THEN_ELSE:
constant pool are cached, and that others will miss. This is a
hack. */
- if ((GET_CODE (src_mem) == SYMBOL_REF
+ if ((SYMBOL_REF_P (src_mem)
&& CONSTANT_POOL_ADDRESS_P (src_mem))
|| reg_mentioned_p (stack_pointer_rtx, src_mem)
|| reg_mentioned_p (frame_pointer_rtx, src_mem)
ind = XEXP (op, 0);
if (reload_completed
- && (GET_CODE (ind) == LABEL_REF
+ && (LABEL_REF_P (ind)
|| (GET_CODE (ind) == CONST
&& GET_CODE (XEXP (ind, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
ind = XEXP (op, 0);
if (reload_completed
- && (GET_CODE (ind) == LABEL_REF
+ && (LABEL_REF_P (ind)
|| (GET_CODE (ind) == CONST
&& GET_CODE (XEXP (ind, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
ind = XEXP (op, 0);
if (reload_completed
- && (GET_CODE (ind) == LABEL_REF
+ && (LABEL_REF_P (ind)
|| (GET_CODE (ind) == CONST
&& GET_CODE (XEXP (ind, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
const char * fmt;
int i;
- if (GET_CODE (x) == SYMBOL_REF)
+ if (SYMBOL_REF_P (x))
return 1;
/* UNSPEC_TLS entries for a symbol include the SYMBOL_REF, but they
const char * fmt;
int i;
- if (GET_CODE (x) == LABEL_REF)
+ if (LABEL_REF_P (x))
return 1;
/* UNSPEC_TLS entries for a symbol include a LABEL_REF for the referencing
offset = const0_rtx;
if ((REG_P (reg = XEXP (operands[nops + i], 0))
- || (GET_CODE (reg) == SUBREG
+ || (SUBREG_P (reg)
&& REG_P (reg = SUBREG_REG (reg))))
|| (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
&& ((REG_P (reg = XEXP (XEXP (operands[nops + i], 0), 0)))
- || (GET_CODE (reg) == SUBREG
+ || (SUBREG_P (reg)
&& REG_P (reg = SUBREG_REG (reg))))
&& (CONST_INT_P (offset
= XEXP (XEXP (operands[nops + i], 0), 1)))))
offset = const0_rtx;
if ((REG_P (reg = XEXP (operands[nops + i], 0))
- || (GET_CODE (reg) == SUBREG
+ || (SUBREG_P (reg)
&& REG_P (reg = SUBREG_REG (reg))))
|| (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
&& ((REG_P (reg = XEXP (XEXP (operands[nops + i], 0), 0)))
- || (GET_CODE (reg) == SUBREG
+ || (SUBREG_P (reg)
&& REG_P (reg = SUBREG_REG (reg))))
&& (CONST_INT_P (offset
= XEXP (XEXP (operands[nops + i], 0), 1)))))
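
One subtlety in these two hunks: the conditions embed assignments inside macro arguments (e.g. REG_P (reg = SUBREG_REG (reg))), which stays safe because GET_CODE and the predicates built on it expand their argument exactly once; per gcc/rtl.h:

  #define GET_CODE(RTX)  ((enum rtx_code) (RTX)->code)
  #define SUBREG_P(RTX)  (GET_CODE (RTX) == SUBREG)
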
/* A compare with a shifted operand. Because of canonicalization, the
comparison will have to be swapped when we emit the assembler. */
if (GET_MODE (y) == SImode
- && (REG_P (y) || (GET_CODE (y) == SUBREG))
+ && (REG_P (y) || SUBREG_P (y))
&& (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
|| GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
|| GET_CODE (x) == ROTATERT))
non-canonical, but arm_gen_compare_reg uses this to generate the
correct canonical form. */
if (GET_MODE (y) == SImode
- && (REG_P (y) || GET_CODE (y) == SUBREG)
+ && (REG_P (y) || SUBREG_P (y))
&& CONST_INT_P (x))
return CC_RSBmode;
/* This operation is performed swapped, but since we only rely on the Z
flag we don't need an additional mode. */
if (GET_MODE (y) == SImode
- && (REG_P (y) || (GET_CODE (y) == SUBREG))
+ && (REG_P (y) || SUBREG_P (y))
&& GET_CODE (x) == NEG
&& (op == EQ || op == NE))
return CC_Zmode;
rtx base, scratch;
HOST_WIDE_INT offset = 0;
- if (GET_CODE (ref) == SUBREG)
+ if (SUBREG_P (ref))
{
offset = SUBREG_BYTE (ref);
ref = SUBREG_REG (ref);
rtx base, scratch;
HOST_WIDE_INT offset = 0;
- if (GET_CODE (ref) == SUBREG)
+ if (SUBREG_P (ref))
{
offset = SUBREG_BYTE (ref);
ref = SUBREG_REG (ref);
return false;
/* Can't deal with subregs. */
- if (GET_CODE (mem) == SUBREG)
+ if (SUBREG_P (mem))
return false;
gcc_assert (MEM_P (mem));
if (TARGET_VXWORKS_RTP
&& flag_pic
&& !sibcall
- && GET_CODE (addr) == SYMBOL_REF
+ && SYMBOL_REF_P (addr)
&& (SYMBOL_REF_DECL (addr)
? !targetm.binds_local_p (SYMBOL_REF_DECL (addr))
: !SYMBOL_REF_LOCAL_P (addr)))
}
else
{
- if (TARGET_HAVE_MVE && GET_CODE (addr) == LABEL_REF)
+ if (TARGET_HAVE_MVE && LABEL_REF_P (addr))
sprintf (buff, "v%sr.64\t%%P0, %%1", load ? "ld" : "st");
else
sprintf (buff, "v%sr%%?\t%%P0, %%1", load ? "ld" : "st");
{
ops[0] = gen_rtx_REG (DImode, REGNO (reg) + 2 * overlap);
ops[1] = adjust_address (mem, SImode, 8 * overlap);
- if (TARGET_HAVE_MVE && GET_CODE (addr) == LABEL_REF)
+ if (TARGET_HAVE_MVE && LABEL_REF_P (addr))
sprintf (buff, "v%sr.32\t%%P0, %%1", load ? "ld" : "st");
else
sprintf (buff, "v%sr%%?\t%%P0, %%1", load ? "ld" : "st");
if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS)
addr = XEXP (addr, 0);
- if (GET_CODE (addr) == LABEL_REF || GET_CODE (addr) == PLUS)
+ if (LABEL_REF_P (addr) || GET_CODE (addr) == PLUS)
{
int insns = REG_NREGS (reg) / 2;
return insns * 4;
/* Mark symbols as position independent. We only do this in the
.text segment, not in the .data segment. */
- if (NEED_GOT_RELOC && flag_pic && making_const_table &&
- (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
+ if (NEED_GOT_RELOC && flag_pic && making_const_table
+ && (SYMBOL_REF_P (x) || LABEL_REF_P (x)))
{
/* See legitimize_pic_address for an explanation of the
TARGET_VXWORKS_RTP check. */
they may be overridden by a non-weak definition at link
time. */
if (!arm_pic_data_is_text_relative
- || (GET_CODE (x) == SYMBOL_REF
+ || (SYMBOL_REF_P (x)
&& (!SYMBOL_REF_LOCAL_P (x)
|| (SYMBOL_REF_DECL (x)
? DECL_WEAK (SYMBOL_REF_DECL (x)) : 0)
gcc_assert (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
gcc_assert (out
- && (REG_P (out) || GET_CODE (out) == SUBREG)
+ && (REG_P (out) || SUBREG_P (out))
&& GET_MODE (out) == DImode);
gcc_assert (in
- && (REG_P (in) || GET_CODE (in) == SUBREG)
+ && (REG_P (in) || SUBREG_P (in))
&& GET_MODE (in) == DImode);
gcc_assert (amount
- && (((REG_P (amount) || GET_CODE (amount) == SUBREG)
+ && (((REG_P (amount) || SUBREG_P (amount))
&& GET_MODE (amount) == SImode)
|| CONST_INT_P (amount)));
gcc_assert (scratch1 == NULL
if (target_word_relocations)
return false;
- if (GET_CODE (tmp) == SYMBOL_REF || GET_CODE (tmp) == LABEL_REF)
+ if (SYMBOL_REF_P (tmp) || LABEL_REF_P (tmp))
return true;
/* (const (plus: symbol_ref const_int)) */
if (GET_CODE (addr) == CONST)
addr = XEXP (addr, 0);
- if (GET_CODE (addr) == REG)
+ if (REG_P (addr))
{
*base = addr;
*offset = const0_rtx;
src = SET_SRC (x);
dest = SET_DEST (x);
- if (GET_CODE (src) == REG && GET_CODE (dest) == MEM)
+ if (REG_P (src) && MEM_P (dest))
{
*is_load = false;
extract_base_offset_in_addr (dest, base, offset);
}
- else if (GET_CODE (src) == MEM && GET_CODE (dest) == REG)
+ else if (MEM_P (src) && REG_P (dest))
{
*is_load = true;
extract_base_offset_in_addr (src, base, offset);
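
The remaining predicates used in these final hunks follow the same one-line pattern in gcc/rtl.h, so the patch as a whole should generate identical code:

  #define REG_P(RTX) (GET_CODE (RTX) == REG)
  #define MEM_P(RTX) (GET_CODE (RTX) == MEM)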