/* Analyze RTL for GNU compiler.
- Copyright (C) 1987-2019 Free Software Foundation, Inc.
+ Copyright (C) 1987-2020 Free Software Foundation, Inc.
This file is part of GCC.
#include "addresses.h"
#include "rtl-iter.h"
#include "hard-reg-set.h"
+#include "function-abi.h"
/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
/* A previous iteration might also have moved from the stack to the
heap, in which case the heap array will already be big enough. */
if (vec_safe_length (array.heap) <= i)
- vec_safe_grow (array.heap, i + 1);
+ vec_safe_grow (array.heap, i + 1, true);
base = array.heap->address ();
memcpy (base, array.stack, sizeof (array.stack));
base[LOCAL_ELEMS] = x;
if (to == from)
return 0;
- /* It is not safe to call INITIAL_ELIMINATION_OFFSET
- before the reload pass. We need to give at least
- an estimation for the resulting frame size. */
- if (! reload_completed)
+ /* It is not safe to call INITIAL_ELIMINATION_OFFSET before the epilogue
+ is completed, but we need to give at least an estimate for the stack
+ pointer based on the frame size. */
+ if (!epilogue_completed)
{
offset1 = crtl->outgoing_args_size + get_frame_size ();
#if !STACK_GROWS_DOWNWARD
return (!known_size_p (decl_size) || known_eq (decl_size, 0)
? maybe_ne (offset, 0)
- : maybe_gt (offset + size, decl_size));
+ : !known_subrange_p (offset, size, 0, decl_size));
}
return 0;
/* Return the CALL in X if there is one.  */
rtx
-get_call_rtx_from (rtx x)
+get_call_rtx_from (const rtx_insn *insn)
{
-  if (INSN_P (x))
-    x = PATTERN (x);
+  rtx x = PATTERN (insn);
  if (GET_CODE (x) == PARALLEL)
    x = XVECEXP (x, 0, 0);
+  /* NOTE(review): as shown, a SET itself is returned, which contradicts the
+     header comment ("Return the CALL"); upstream strips SET_SRC and checks
+     for a CALL with a MEM operand here -- confirm no lines were lost from
+     this hunk.  */
  if (GET_CODE (x) == SET)
    return x;
  return NULL_RTX;
}
+
+/* Get the declaration of the function called by INSN.
+   Return NULL_TREE if the callee is unknown.  */
+
+tree
+get_call_fndecl (const rtx_insn *insn)
+{
+  rtx note, datum;
+
+  /* The callee, when known, is recorded on the call insn as a
+     REG_CALL_DECL note.  */
+  note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
+  if (note == NULL_RTX)
+    return NULL_TREE;
+
+  /* The note's operand should be a SYMBOL_REF carrying the decl; a null
+     operand means the callee was not known when the note was added
+     (e.g. an indirect call).  */
+  datum = XEXP (note, 0);
+  if (datum != NULL_RTX)
+    return SYMBOL_REF_DECL (datum);
+
+  return NULL_TREE;
+}
\f
/* Return the value of the integer term in X, if one is apparent;
otherwise return 0.
return 1;
return 0;
- case CLOBBER_HIGH:
- gcc_assert (REG_P (XEXP (body, 0)));
- return 0;
-
case COND_EXEC:
if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
return 1;
|| (CALL_P (insn)
&& ((REG_P (reg)
&& REGNO (reg) < FIRST_PSEUDO_REGISTER
- && overlaps_hard_reg_set_p (regs_invalidated_by_call,
- GET_MODE (reg), REGNO (reg)))
+ && (insn_callee_abi (as_a<const rtx_insn *> (insn))
+ .clobbers_reg_p (GET_MODE (reg), REGNO (reg))))
|| MEM_P (reg)
|| find_reg_fusage (insn, CLOBBER, reg)))))
return true;
{
struct set_of_data *const data = (struct set_of_data *) (data1);
if (rtx_equal_p (x, data->pat)
- || (GET_CODE (pat) == CLOBBER_HIGH
- && REGNO(data->pat) == REGNO(XEXP (pat, 0))
- && reg_is_clobbered_by_clobber_high (data->pat, XEXP (pat, 0)))
- || (GET_CODE (pat) != CLOBBER_HIGH && !MEM_P (x)
- && reg_overlap_mentioned_p (data->pat, x)))
+ || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
data->found = pat;
}
struct set_of_data data;
data.found = NULL_RTX;
data.pat = pat;
- note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
+ note_pattern_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
return data.found;
}
}
/* Examine INSN, and compute the set of hard registers written by it.
- Store it in *PSET. Should only be called after reload. */
+ Store it in *PSET. Should only be called after reload.
+
+ IMPLICIT is true if we should include registers that are fully-clobbered
+ by calls. This should be used with caution, since it doesn't include
+ partially-clobbered registers. */
void
find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
{
rtx link;
CLEAR_HARD_REG_SET (*pset);
- note_stores (PATTERN (insn), record_hard_reg_sets, pset);
- if (CALL_P (insn))
- {
- if (implicit)
- IOR_HARD_REG_SET (*pset, call_used_reg_set);
-
- for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
- record_hard_reg_sets (XEXP (link, 0), NULL, pset);
- }
+ note_stores (insn, record_hard_reg_sets, pset);
+ if (CALL_P (insn) && implicit)
+ *pset |= insn_callee_abi (insn).full_reg_clobbers ();
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_INC)
record_hard_reg_sets (XEXP (link, 0), NULL, pset);
{
case USE:
case CLOBBER:
- case CLOBBER_HIGH:
break;
case SET:
return 0;
src = SUBREG_REG (src);
dst = SUBREG_REG (dst);
+ if (GET_MODE (src) != GET_MODE (dst))
+ /* It is hard to tell whether subregs refer to the same bits, so act
+ conservatively and return 0. */
+ return 0;
}
/* It is a NOOP if destination overlaps with selected src vector
int i;
rtx par = XEXP (src, 1);
rtx src0 = XEXP (src, 0);
- poly_int64 c0 = rtx_to_poly_int64 (XVECEXP (par, 0, 0));
+ poly_int64 c0;
+ if (!poly_int_rtx_p (XVECEXP (par, 0, 0), &c0))
+ return 0;
poly_int64 offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;
for (i = 1; i < XVECLEN (par, 0); i++)
- if (maybe_ne (rtx_to_poly_int64 (XVECEXP (par, 0, i)), c0 + i))
- return 0;
+ {
+ poly_int64 c0i;
+ if (!poly_int_rtx_p (XVECEXP (par, 0, i), &c0i)
+ || maybe_ne (c0i, c0 + i))
+ return 0;
+ }
return
REG_CAN_CHANGE_MODE_P (REGNO (dst), GET_MODE (src0), GET_MODE (dst))
&& simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
{
rtx tem = XVECEXP (pat, 0, i);
- if (GET_CODE (tem) == USE
- || GET_CODE (tem) == CLOBBER
- || GET_CODE (tem) == CLOBBER_HIGH)
+ if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
continue;
if (GET_CODE (tem) != SET || ! set_noop_p (tem))
the SUBREG will be passed. */
void
-note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
+note_pattern_stores (const_rtx x,
+ void (*fun) (rtx, const_rtx, void *), void *data)
{
int i;
if (GET_CODE (x) == COND_EXEC)
x = COND_EXEC_CODE (x);
- if (GET_CODE (x) == SET
- || GET_CODE (x) == CLOBBER
- || GET_CODE (x) == CLOBBER_HIGH)
+ if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
{
rtx dest = SET_DEST (x);
else if (GET_CODE (x) == PARALLEL)
for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
- note_stores (XVECEXP (x, 0, i), fun, data);
+ note_pattern_stores (XVECEXP (x, 0, i), fun, data);
+}
+
+/* Same, but for an instruction.  If the instruction is a call, include
+   any CLOBBERs in its CALL_INSN_FUNCTION_USAGE.  */
+
+void
+note_stores (const rtx_insn *insn,
+	     void (*fun) (rtx, const_rtx, void *), void *data)
+{
+  /* CLOBBER expressions attached to CALL_INSN_FUNCTION_USAGE record
+     registers clobbered by the call; walk those before the pattern so
+     FUN sees every store the insn performs.  */
+  if (CALL_P (insn))
+    for (rtx link = CALL_INSN_FUNCTION_USAGE (insn);
+	 link; link = XEXP (link, 1))
+      if (GET_CODE (XEXP (link, 0)) == CLOBBER)
+	note_pattern_stores (XEXP (link, 0), fun, data);
+  note_pattern_stores (PATTERN (insn), fun, data);
+}
\f
/* Like note_stores, but call FUN for each expression that is being
}
/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.
- Return true if any note has been removed. */
+ If NO_RESCAN is false and any notes were removed, call
+ df_notes_rescan. Return true if any note has been removed. */
bool
-remove_reg_equal_equiv_notes (rtx_insn *insn)
+remove_reg_equal_equiv_notes (rtx_insn *insn, bool no_rescan)
{
rtx *loc;
bool ret = false;
else
loc = &XEXP (*loc, 1);
}
+ if (ret && !no_rescan)
+ df_notes_rescan (insn);
return ret;
}
case UMOD:
if (HONOR_SNANS (x))
return 1;
- if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
+ if (FLOAT_MODE_P (GET_MODE (x)))
return flag_trapping_math;
if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
return 1;
+ if (GET_CODE (XEXP (x, 1)) == CONST_VECTOR)
+ {
+ /* For CONST_VECTOR, return 1 if any element is or might be zero. */
+ unsigned int n_elts;
+ rtx op = XEXP (x, 1);
+ if (!GET_MODE_NUNITS (GET_MODE (op)).is_constant (&n_elts))
+ {
+ if (!CONST_VECTOR_DUPLICATE_P (op))
+ return 1;
+ for (unsigned i = 0; i < (unsigned int) XVECLEN (op, 0); i++)
+ if (CONST_VECTOR_ENCODED_ELT (op, i) == const0_rtx)
+ return 1;
+ }
+ else
+ for (unsigned i = 0; i < n_elts; i++)
+ if (CONST_VECTOR_ELT (op, i) == const0_rtx)
+ return 1;
+ }
break;
case EXPR_LIST:
case NEG:
case ABS:
case SUBREG:
+ case VEC_MERGE:
+ case VEC_SELECT:
+ case VEC_CONCAT:
+ case VEC_DUPLICATE:
/* These operations don't trap even with floating point. */
break;
default:
/* Any floating arithmetic may trap. */
- if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
+ if (FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
return 1;
}
return may_trap_p_1 (x, 1);
}
\f
-/* Return nonzero if X contains a comparison that is not either EQ or NE,
- i.e., an inequality. */
-
-int
-inequality_comparisons_p (const_rtx x)
-{
- const char *fmt;
- int len, i;
- const enum rtx_code code = GET_CODE (x);
-
- switch (code)
- {
- case REG:
- case SCRATCH:
- case PC:
- case CC0:
- CASE_CONST_ANY:
- case CONST:
- case LABEL_REF:
- case SYMBOL_REF:
- return 0;
-
- case LT:
- case LTU:
- case GT:
- case GTU:
- case LE:
- case LEU:
- case GE:
- case GEU:
- return 1;
-
- default:
- break;
- }
-
- len = GET_RTX_LENGTH (code);
- fmt = GET_RTX_FORMAT (code);
-
- for (i = 0; i < len; i++)
- {
- if (fmt[i] == 'e')
- {
- if (inequality_comparisons_p (XEXP (x, i)))
- return 1;
- }
- else if (fmt[i] == 'E')
- {
- int j;
- for (j = XVECLEN (x, i) - 1; j >= 0; j--)
- if (inequality_comparisons_p (XVECEXP (x, i, j)))
- return 1;
- }
- }
-
- return 0;
-}
-\f
/* Replace any occurrence of FROM in X with TO. The function does
not enter into CONST_DOUBLE for the replace.
return true;
}
+/* For INSN known to satisfy tablejump_p, determine if it actually is a
+   CASESI.  Return the insn pattern if so, NULL_RTX otherwise.  */
+
+rtx
+tablejump_casesi_pattern (const rtx_insn *insn)
+{
+  rtx tmp;
+
+  /* A casesi-style tablejump is a single set of the PC from an
+     IF_THEN_ELSE whose third operand is a LABEL_REF (presumably the
+     out-of-range default label -- confirm against the target's casesi
+     expander); other tablejumps set the PC from the table entry
+     directly and have no IF_THEN_ELSE.  */
+  if ((tmp = single_set (insn)) != NULL
+    && SET_DEST (tmp) == pc_rtx
+    && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
+    && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
+    return tmp;
+
+  return NULL_RTX;
+}
+
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
constant that is not in the constant pool and not in the condition
of an IF_THEN_ELSE. */
return 0;
}
-/* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
- and SUBREG_BYTE, return the bit offset where the subreg begins
- (counting from the least significant bit of the operand). */
+/* Reinterpret a subreg as a bit extraction from an integer and return
+ the position of the least significant bit of the extracted value.
+ In other words, if the extraction were performed as a shift right
+ and mask, return the number of bits to shift right.
+
+ The outer value of the subreg has OUTER_BYTES bytes and starts at
+ byte offset SUBREG_BYTE within an inner value of INNER_BYTES bytes. */
poly_uint64
-subreg_lsb_1 (machine_mode outer_mode,
- machine_mode inner_mode,
- poly_uint64 subreg_byte)
+subreg_size_lsb (poly_uint64 outer_bytes,
+ poly_uint64 inner_bytes,
+ poly_uint64 subreg_byte)
{
poly_uint64 subreg_end, trailing_bytes, byte_pos;
/* A paradoxical subreg begins at bit position 0. */
- if (paradoxical_subreg_p (outer_mode, inner_mode))
- return 0;
+ gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
+ if (maybe_gt (outer_bytes, inner_bytes))
+ {
+ gcc_checking_assert (known_eq (subreg_byte, 0U));
+ return 0;
+ }
- subreg_end = subreg_byte + GET_MODE_SIZE (outer_mode);
- trailing_bytes = GET_MODE_SIZE (inner_mode) - subreg_end;
+ subreg_end = subreg_byte + outer_bytes;
+ trailing_bytes = inner_bytes - subreg_end;
if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
byte_pos = trailing_bytes;
else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
/* Give the backend a chance to disallow the mode change. */
if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
&& GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
- && !REG_CAN_CHANGE_MODE_P (xregno, xmode, ymode)
- /* We can use mode change in LRA for some transformations. */
- && ! lra_in_progress)
+ && !REG_CAN_CHANGE_MODE_P (xregno, xmode, ymode))
return -1;
/* We shouldn't simplify stack-related registers. */
if (INSN_P (before))
{
int nregs_old = parm.nregs;
- note_stores (PATTERN (before), parms_set, &parm);
+ note_stores (before, parms_set, &parm);
/* If we found something that did not set a parameter reg,
we're done. Do not keep going, as that might result
in hoisting an insn before the setting of a pseudo
const char *fmt;
int total;
int factor;
+ unsigned mode_size;
if (x == 0)
return 0;
- if (GET_MODE (x) != VOIDmode)
+ if (GET_CODE (x) == SET)
+ /* A SET doesn't have a mode, so let's look at the SET_DEST to get
+ the mode for the factor. */
+ mode = GET_MODE (SET_DEST (x));
+ else if (GET_MODE (x) != VOIDmode)
mode = GET_MODE (x);
+ mode_size = estimated_poly_value (GET_MODE_SIZE (mode));
+
/* A size N times larger than UNITS_PER_WORD likely needs N times as
many insns, taking N times as long. */
- factor = estimated_poly_value (GET_MODE_SIZE (mode)) / UNITS_PER_WORD;
- if (factor == 0)
- factor = 1;
+ factor = mode_size > UNITS_PER_WORD ? mode_size / UNITS_PER_WORD : 1;
/* Compute the default costs of certain things.
Note that targetm.rtx_costs can override the defaults. */
/* Used in combine.c as a marker. */
total = 0;
break;
- case SET:
- /* A SET doesn't have a mode, so let's look at the SET_DEST to get
- the mode for the factor. */
- mode = GET_MODE (SET_DEST (x));
- factor = estimated_poly_value (GET_MODE_SIZE (mode)) / UNITS_PER_WORD;
- if (factor == 0)
- factor = 1;
- /* FALLTHRU */
default:
total = factor * COSTS_N_INSNS (1);
}
|| ((extend_op = load_extend_op (inner_mode)) == SIGN_EXTEND
? val_signbit_known_set_p (inner_mode, nonzero)
: extend_op != ZERO_EXTEND)
- || (!MEM_P (SUBREG_REG (x)) && !REG_P (SUBREG_REG (x))))
+ || !MEM_P (SUBREG_REG (x)))
&& xmode_width > inner_width)
nonzero
|= (GET_MODE_MASK (GET_MODE (x)) & ~GET_MODE_MASK (inner_mode));
return false;
}
-/* Return true if reg REGNO with mode REG_MODE would be clobbered by the
-   clobber_high operand in CLOBBER_HIGH_OP.  */
-
-bool
-reg_is_clobbered_by_clobber_high (unsigned int regno, machine_mode reg_mode,
-				  const_rtx clobber_high_op)
+/* Process recursively X of INSN and add REG_INC notes if necessary.  */
+void
+add_auto_inc_notes (rtx_insn *insn, rtx x)
{
-  unsigned int clobber_regno = REGNO (clobber_high_op);
-  machine_mode clobber_mode = GET_MODE (clobber_high_op);
-  unsigned char regno_nregs = hard_regno_nregs (regno, reg_mode);
-
-  /* Clobber high should always span exactly one register.  */
-  gcc_assert (REG_NREGS (clobber_high_op) == 1);
-
-  /* Clobber high needs to match with one of the registers in X.  */
-  if (clobber_regno < regno || clobber_regno >= regno + regno_nregs)
-    return false;
-
-  gcc_assert (reg_mode != BLKmode && clobber_mode != BLKmode);
+  enum rtx_code code = GET_CODE (x);
+  const char *fmt;
+  int i, j;
-  if (reg_mode == VOIDmode)
-    return clobber_mode != VOIDmode;
+  /* A MEM whose address passes auto_inc_p modifies the address register
+     as a side effect; record that on INSN as a REG_INC note and stop
+     descending into this MEM.  */
+  if (code == MEM && auto_inc_p (XEXP (x, 0)))
+    {
+      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
+      return;
+    }
-  /* Clobber high will clobber if its size might be greater than the size of
-     register regno.  */
-  return maybe_gt (exact_div (GET_MODE_SIZE (reg_mode), regno_nregs),
-		   GET_MODE_SIZE (clobber_mode));
+  /* Scan all X sub-expressions.  */
+  fmt = GET_RTX_FORMAT (code);
+  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
+    {
+      if (fmt[i] == 'e')
+        add_auto_inc_notes (insn, XEXP (x, i));
+      else if (fmt[i] == 'E')
+        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
+          add_auto_inc_notes (insn, XVECEXP (x, i, j));
+    }
}