+2017-10-26 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * rtl.h (wider_subreg_mode): New function.
+ * ira.h (ira_sort_regnos_for_alter_reg): Take a machine_mode *
+ rather than an unsigned int *.
+ * ira-color.c (regno_max_ref_width): Replace with...
+ (regno_max_ref_mode): ...this new variable.
+ (coalesced_pseudo_reg_slot_compare): Update accordingly.
+ Use wider_subreg_mode.
+ (ira_sort_regnos_for_alter_reg): Likewise. Take a machine_mode *
+ rather than an unsigned int *.
+ * lra-constraints.c (uses_hard_regs_p): Use wider_subreg_mode.
+ (process_alt_operands): Likewise.
+ (invariant_p): Likewise.
+ * lra-spills.c (assign_mem_slot): Likewise.
+ (add_pseudo_to_slot): Likewise.
+ * lra.c (collect_non_operand_hard_regs): Likewise.
+ (add_regs_to_insn_regno_info): Likewise.
+ * reload1.c (reg_max_ref_width): Replace with...
+ (reg_max_ref_mode): ...this new variable.
+ (reload): Update accordingly. Update call to
+ ira_sort_regnos_for_alter_reg.
+ (alter_reg): Update to use reg_max_ref_mode. Call wider_subreg_mode.
+ (init_eliminable_invariants): Update to use reg_max_ref_mode.
+ (scan_paradoxical_subregs): Likewise.
+
2017-10-26 Wilco Dijkstra <wdijkstr@arm.com>
* config/aarch64/aarch64.h (EXIT_IGNORE_STACK): Set if alloca is used.
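For reference, here is a minimal standalone sketch (not part of the patch) of what the new rtl.h helper computes: wider_subreg_mode keeps whichever of a subreg's outer and inner modes is wide enough to hold both values, preferring the outer mode on a tie. The struct mode type, the byte-size comparison and the example modes below are illustrative stand-ins for machine_mode and for GCC's partial_subreg_p (which compares mode precisions), not GCC code.

/* Standalone illustration of the wider_subreg_mode logic, using byte
   sizes in place of machine_mode precisions.  */
#include <stdio.h>

/* Hypothetical stand-in for a machine mode, identified by its size.  */
struct mode { const char *name; unsigned int size; };

/* Analogue of partial_subreg_p: true if the outer value occupies only
   part of the inner value.  */
static int
partial_subreg_p (struct mode outer, struct mode inner)
{
  return outer.size < inner.size;
}

/* Analogue of wider_subreg_mode: keep whichever mode is big enough to
   hold both values, preferring the outer mode in the event of a tie.  */
static struct mode
wider_subreg_mode (struct mode outer, struct mode inner)
{
  return partial_subreg_p (outer, inner) ? inner : outer;
}

int
main (void)
{
  struct mode SImode = { "SImode", 4 }, DImode = { "DImode", 8 };
  /* Paradoxical subreg, e.g. (subreg:DI (reg:SI ...) 0): the outer DImode
     already covers the inner SImode value.  */
  printf ("%s\n", wider_subreg_mode (DImode, SImode).name); /* DImode */
  /* Normal subreg, e.g. (subreg:SI (reg:DI ...) 0): the inner DImode
     value is wider, so it wins.  */
  printf ("%s\n", wider_subreg_mode (SImode, DImode).name); /* DImode */
  return 0;
}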
/* Widest width in which each pseudo reg is referred to (via subreg).
It is used for sorting pseudo registers. */
-static unsigned int *regno_max_ref_width;
+static machine_mode *regno_max_ref_mode;
/* Sort pseudos according their slot numbers (putting ones with
smaller numbers first, or last when the frame pointer is not
ira_allocno_t a1 = ira_regno_allocno_map[regno1];
ira_allocno_t a2 = ira_regno_allocno_map[regno2];
int diff, slot_num1, slot_num2;
- int total_size1, total_size2;
+ machine_mode mode1, mode2;
if (a1 == NULL || ALLOCNO_HARD_REGNO (a1) >= 0)
{
if ((diff = slot_num1 - slot_num2) != 0)
return (frame_pointer_needed
|| (!FRAME_GROWS_DOWNWARD) == STACK_GROWS_DOWNWARD ? diff : -diff);
- total_size1 = MAX (PSEUDO_REGNO_BYTES (regno1),
- regno_max_ref_width[regno1]);
- total_size2 = MAX (PSEUDO_REGNO_BYTES (regno2),
- regno_max_ref_width[regno2]);
- if ((diff = total_size2 - total_size1) != 0)
+ mode1 = wider_subreg_mode (PSEUDO_REGNO_MODE (regno1),
+ regno_max_ref_mode[regno1]);
+ mode2 = wider_subreg_mode (PSEUDO_REGNO_MODE (regno2),
+ regno_max_ref_mode[regno2]);
+ if ((diff = GET_MODE_SIZE (mode2) - GET_MODE_SIZE (mode1)) != 0)
return diff;
return regno1 - regno2;
}
reload. */
void
ira_sort_regnos_for_alter_reg (int *pseudo_regnos, int n,
- unsigned int *reg_max_ref_width)
+ machine_mode *reg_max_ref_mode)
{
int max_regno = max_reg_num ();
int i, regno, num, slot_num;
ira_assert (ALLOCNO_HARD_REGNO (a) < 0);
ALLOCNO_HARD_REGNO (a) = -slot_num;
if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
- fprintf (ira_dump_file, " a%dr%d(%d,%d)",
- ALLOCNO_NUM (a), ALLOCNO_REGNO (a), ALLOCNO_FREQ (a),
- MAX (PSEUDO_REGNO_BYTES (ALLOCNO_REGNO (a)),
- reg_max_ref_width[ALLOCNO_REGNO (a)]));
+ {
+ machine_mode mode = wider_subreg_mode
+ (PSEUDO_REGNO_MODE (ALLOCNO_REGNO (a)),
+ reg_max_ref_mode[ALLOCNO_REGNO (a)]);
+ fprintf (ira_dump_file, " a%dr%d(%d,%d)",
+ ALLOCNO_NUM (a), ALLOCNO_REGNO (a), ALLOCNO_FREQ (a),
+ GET_MODE_SIZE (mode));
+ }
if (a == allocno)
break;
ira_spilled_reg_stack_slots_num = slot_num - 1;
ira_free (spilled_coalesced_allocnos);
/* Sort regnos according the slot numbers. */
- regno_max_ref_width = reg_max_ref_width;
+ regno_max_ref_mode = reg_max_ref_mode;
qsort (pseudo_regnos, n, sizeof (int), coalesced_pseudo_reg_slot_compare);
FOR_EACH_ALLOCNO (a, ai)
ALLOCNO_ADD_DATA (a) = NULL;
extern void ira_expand_reg_equiv (void);
extern void ira_update_equiv_info_by_shuffle_insn (int, int, rtx_insn *);
-extern void ira_sort_regnos_for_alter_reg (int *, int, unsigned int *);
+extern void ira_sort_regnos_for_alter_reg (int *, int, machine_mode *);
extern void ira_mark_allocation_change (int);
extern void ira_mark_memory_move_deletion (int, int);
extern bool ira_reassign_pseudos (int *, int, HARD_REG_SET, HARD_REG_SET *,
mode = GET_MODE (x);
if (code == SUBREG)
{
+ mode = wider_subreg_mode (x);
x = SUBREG_REG (x);
code = GET_CODE (x);
- if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (mode))
- mode = GET_MODE (x);
}
if (REG_P (x))
biggest_mode[nop] = GET_MODE (op);
if (GET_CODE (op) == SUBREG)
{
+ biggest_mode[nop] = wider_subreg_mode (op);
operand_reg[nop] = reg = SUBREG_REG (op);
- if (GET_MODE_SIZE (biggest_mode[nop])
- < GET_MODE_SIZE (GET_MODE (reg)))
- biggest_mode[nop] = GET_MODE (reg);
}
if (! REG_P (reg))
operand_reg[nop] = NULL_RTX;
{
x = SUBREG_REG (x);
code = GET_CODE (x);
- if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (mode))
- mode = GET_MODE (x);
+ mode = wider_subreg_mode (mode, GET_MODE (x));
}
if (MEM_P (x))
machine_mode mode = GET_MODE (regno_reg_rtx[i]);
HOST_WIDE_INT inherent_size = PSEUDO_REGNO_BYTES (i);
machine_mode wider_mode
- = (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (lra_reg_info[i].biggest_mode)
- ? mode : lra_reg_info[i].biggest_mode);
+ = wider_subreg_mode (mode, lra_reg_info[i].biggest_mode);
HOST_WIDE_INT total_size = GET_MODE_SIZE (wider_mode);
HOST_WIDE_INT adjust = 0;
and a total size which provides room for paradoxical subregs.
We need to make sure the size and alignment of the slot are
sufficient for both. */
- machine_mode mode = (GET_MODE_SIZE (PSEUDO_REGNO_MODE (regno))
- >= GET_MODE_SIZE (lra_reg_info[regno].biggest_mode)
- ? PSEUDO_REGNO_MODE (regno)
- : lra_reg_info[regno].biggest_mode);
+ machine_mode mode = wider_subreg_mode (PSEUDO_REGNO_MODE (regno),
+ lra_reg_info[regno].biggest_mode);
unsigned int align = spill_slot_alignment (mode);
slots[slot_num].align = MAX (slots[slot_num].align, align);
slots[slot_num].size = MAX (slots[slot_num].size, GET_MODE_SIZE (mode));
subreg_p = false;
if (code == SUBREG)
{
+ mode = wider_subreg_mode (op);
if (read_modify_subreg_p (op))
subreg_p = true;
op = SUBREG_REG (op);
code = GET_CODE (op);
- if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (op)))
- mode = GET_MODE (op);
}
if (REG_P (op))
{
subreg_p = false;
if (GET_CODE (x) == SUBREG)
{
+ mode = wider_subreg_mode (x);
if (read_modify_subreg_p (x))
subreg_p = true;
x = SUBREG_REG (x);
code = GET_CODE (x);
- if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
- mode = GET_MODE (x);
}
if (REG_P (x))
{
in the current insn. */
static HARD_REG_SET reg_is_output_reload;
-/* Widest width in which each pseudo reg is referred to (via subreg). */
-static unsigned int *reg_max_ref_width;
+/* Widest mode in which each pseudo reg is referred to (via subreg). */
+static machine_mode *reg_max_ref_mode;
/* Vector to remember old contents of reg_renumber before spilling. */
static short *reg_old_renumber;
if (ira_conflicts_p)
/* Ask IRA to order pseudo-registers for better stack slot
sharing. */
- ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
+ ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);
for (i = 0; i < n; i++)
alter_reg (temp_pseudo_reg_arr[i], -1, false);
/* Indicate that we no longer have known memory locations or constants. */
free_reg_equiv ();
- free (reg_max_ref_width);
+ free (reg_max_ref_mode);
free (reg_old_renumber);
free (pseudo_previous_regs);
free (pseudo_forbidden_regs);
machine_mode mode = GET_MODE (regno_reg_rtx[i]);
unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
- unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
- unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
+ machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
+ unsigned int total_size = GET_MODE_SIZE (wider_mode);
+ unsigned int min_align = GET_MODE_BITSIZE (reg_max_ref_mode[i]);
int adjust = 0;
something_was_spilled = true;
grow_reg_equivs ();
if (do_subregs)
- reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
+ reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno);
else
- reg_max_ref_width = NULL;
+ reg_max_ref_mode = NULL;
num_eliminable_invariants = 0;
return something_changed;
}
\f
-/* Find all paradoxical subregs within X and update reg_max_ref_width. */
+/* Find all paradoxical subregs within X and update reg_max_ref_mode. */
static void
scan_paradoxical_subregs (rtx x)
return;
case SUBREG:
- if (REG_P (SUBREG_REG (x))
- && (GET_MODE_SIZE (GET_MODE (x))
- > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
+ if (REG_P (SUBREG_REG (x)))
{
- reg_max_ref_width[REGNO (SUBREG_REG (x))]
- = GET_MODE_SIZE (GET_MODE (x));
- mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
+ unsigned int regno = REGNO (SUBREG_REG (x));
+ if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)))
+ {
+ reg_max_ref_mode[regno] = GET_MODE (x);
+ mark_home_live_1 (regno, GET_MODE (x));
+ }
}
return;
GET_MODE_SIZE (innermode));
}
+/* Given that a subreg has outer mode OUTERMODE and inner mode INNERMODE,
+ return the mode that is big enough to hold both the outer and inner
+ values. Prefer the outer mode in the event of a tie. */
+
+inline machine_mode
+wider_subreg_mode (machine_mode outermode, machine_mode innermode)
+{
+ return partial_subreg_p (outermode, innermode) ? innermode : outermode;
+}
+
+/* Likewise for subreg X. */
+
+inline machine_mode
+wider_subreg_mode (const_rtx x)
+{
+ return wider_subreg_mode (GET_MODE (x), GET_MODE (SUBREG_REG (x)));
+}
+
extern unsigned int subreg_size_highpart_offset (unsigned int, unsigned int);
/* Return the SUBREG_BYTE for an OUTERMODE highpart of an INNERMODE value. */